[ 510.407398] env[62914]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62914) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 510.407740] env[62914]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62914) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 510.407839] env[62914]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62914) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 510.408247] env[62914]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 510.509232] env[62914]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62914) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 510.520317] env[62914]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62914) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 511.125914] env[62914]: INFO nova.virt.driver [None req-995b7eb1-afed-43f2-8c7f-858e0a6ef6d5 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 511.200912] env[62914]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 511.201092] env[62914]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 511.201212] env[62914]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62914) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 514.461051] env[62914]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-bc0a99a7-9ec7-4bc8-9aa1-eb84a754969f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.478086] env[62914]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62914) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 514.478266] env[62914]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-a32b97b2-2bae-4151-9e3f-bfbd6770d331 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.505857] env[62914]: INFO oslo_vmware.api [-] Successfully established new session; session ID is e5187.
[ 514.506086] env[62914]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.305s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 514.506617] env[62914]: INFO nova.virt.vmwareapi.driver [None req-995b7eb1-afed-43f2-8c7f-858e0a6ef6d5 None None] VMware vCenter version: 7.0.3
[ 514.510156] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b292f8ab-6d9d-4501-8bf2-e9de9d6882f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.532753] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15ad6a2-abb9-45f9-94fa-9f557beaabbc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.539538] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4258edce-7bfb-40ea-b4e9-709d327e9db0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.546852] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf4eca3-32ec-4491-89fc-fb0d919ecca1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.560860] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3cd16d-6cf3-496c-ad4b-e2669444b4c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.567744] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14582a4a-45b9-47dd-a70c-988d66af0993 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.599633] env[62914]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-35c7fea7-c83b-4e99-ab73-d7659456a62c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 514.605718] env[62914]: DEBUG nova.virt.vmwareapi.driver [None req-995b7eb1-afed-43f2-8c7f-858e0a6ef6d5 None None] Extension org.openstack.compute already exists. {{(pid=62914) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 514.608538] env[62914]: INFO nova.compute.provider_config [None req-995b7eb1-afed-43f2-8c7f-858e0a6ef6d5 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 515.111970] env[62914]: DEBUG nova.context [None req-995b7eb1-afed-43f2-8c7f-858e0a6ef6d5 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),0dcf94bf-c9dd-4817-9d5c-58080813432f(cell1) {{(pid=62914) load_cells /opt/stack/nova/nova/context.py:464}}
[ 515.114212] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 515.114472] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 515.115189] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 515.115660] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Acquiring lock "0dcf94bf-c9dd-4817-9d5c-58080813432f" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 515.115852] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Lock "0dcf94bf-c9dd-4817-9d5c-58080813432f" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 515.117215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Lock "0dcf94bf-c9dd-4817-9d5c-58080813432f" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 515.139622] env[62914]: INFO dbcounter [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Registered counter for database nova_cell0
[ 515.148350] env[62914]: INFO dbcounter [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Registered counter for database nova_cell1
[ 515.151713] env[62914]: DEBUG oslo_db.sqlalchemy.engines [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62914) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 515.152168] env[62914]: DEBUG oslo_db.sqlalchemy.engines [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62914) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 515.157334] env[62914]: ERROR nova.db.main.api [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 515.157334] env[62914]: result = function(*args, **kwargs)
[ 515.157334] env[62914]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 515.157334] env[62914]: return func(*args, **kwargs)
[ 515.157334] env[62914]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 515.157334] env[62914]: result = fn(*args, **kwargs)
[ 515.157334] env[62914]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 515.157334] env[62914]: return f(*args, **kwargs)
[ 515.157334] env[62914]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 515.157334] env[62914]: return db.service_get_minimum_version(context, binaries)
[ 515.157334] env[62914]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 515.157334] env[62914]: _check_db_access()
[ 515.157334] env[62914]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 515.157334] env[62914]: stacktrace = ''.join(traceback.format_stack())
[ 515.157334] env[62914]:
[ 515.158228] env[62914]: ERROR nova.db.main.api [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 515.158228] env[62914]: result = function(*args, **kwargs)
[ 515.158228] env[62914]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 515.158228] env[62914]: return func(*args, **kwargs)
[ 515.158228] env[62914]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 515.158228] env[62914]: result = fn(*args, **kwargs)
[ 515.158228] env[62914]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 515.158228] env[62914]: return f(*args, **kwargs)
[ 515.158228] env[62914]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 515.158228] env[62914]: return db.service_get_minimum_version(context, binaries)
[ 515.158228] env[62914]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 515.158228] env[62914]: _check_db_access()
[ 515.158228] env[62914]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 515.158228] env[62914]: stacktrace = ''.join(traceback.format_stack())
[ 515.158228] env[62914]:
[ 515.158637] env[62914]: WARNING nova.objects.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Failed to get minimum service version for cell 0dcf94bf-c9dd-4817-9d5c-58080813432f
[ 515.158758] env[62914]: WARNING nova.objects.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 515.159220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Acquiring lock "singleton_lock" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 515.159386] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Acquired lock "singleton_lock" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 515.159635] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Releasing lock "singleton_lock" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 515.159967] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Full set of CONF: {{(pid=62914) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 515.160126] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ******************************************************************************** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 515.160254] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Configuration options gathered from: {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 515.160391] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 515.160584] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 515.160712] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ================================================================================ {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 515.160925] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] allow_resize_to_same_host = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 515.161113] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] arq_binding_timeout = 300 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 515.161247] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] backdoor_port = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 515.161412] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] backdoor_socket = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 515.161680] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] block_device_allocate_retries = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 515.161876] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] block_device_allocate_retries_interval = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 515.162067] env[62914]: DEBUG
oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cert = self.pem {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.162240] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.162414] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute_monitors = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.162583] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] config_dir = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.162755] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] config_drive_format = iso9660 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.162905] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.163075] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] config_source = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.163254] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] console_host = devstack {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.163422] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] control_exchange = nova {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.163582] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cpu_allocation_ratio = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.163743] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] daemon = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.163911] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] debug = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.164091] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] default_access_ip_network_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.164261] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] default_availability_zone = nova {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.164419] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] default_ephemeral_format = 
None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.164580] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] default_green_pool_size = 1000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.164824] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.165025] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] default_schedule_zone = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.165197] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] disk_allocation_ratio = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.165362] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] enable_new_services = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.165541] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] enabled_apis = ['osapi_compute'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.165707] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] enabled_ssl_apis = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.165871] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] flat_injected = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.166041] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] force_config_drive = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.166240] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] force_raw_images = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.166423] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] graceful_shutdown_timeout = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.166587] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] heal_instance_info_cache_interval = 60 {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.166804] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] host = cpu-1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.167011] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.167199] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] initial_disk_allocation_ratio = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.167367] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] initial_ram_allocation_ratio = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.167583] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.167749] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_build_timeout = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.167913] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_delete_interval = 300 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.168095] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_format = [instance: %(uuid)s] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.168267] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_name_template = instance-%08x {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.168430] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_usage_audit = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.168602] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_usage_audit_period = month {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.168768] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.168932] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] instances_path = /opt/stack/data/nova/instances {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.169113] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] internal_service_availability_zone = internal {{(pid=62914) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.169273] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] key = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.169432] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] live_migration_retry_count = 30 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.169600] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_color = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.169766] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_config_append = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.169931] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.170103] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_dir = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.170267] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.170392] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_options = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.170551] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_rotate_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.170717] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_rotate_interval_type = days {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.170882] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] log_rotation_type = none {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171030] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171166] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171337] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171501] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171630] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171790] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] long_rpc_timeout = 1800 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.171954] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] max_concurrent_builds = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.172126] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] max_concurrent_live_migrations = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.172292] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] max_concurrent_snapshots = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.172445] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] max_local_block_devices = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.172601] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] max_logfile_count = 30 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.172758] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] max_logfile_size_mb = 200 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.172916] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] maximum_instance_delete_attempts = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.173097] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metadata_listen = 0.0.0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.173269] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metadata_listen_port = 8775 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.173437] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metadata_workers = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.173596] env[62914]: DEBUG oslo_service.service 
[None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] migrate_max_retries = -1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.173762] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] mkisofs_cmd = genisoimage {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.173970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] my_block_storage_ip = 10.180.1.21 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.174122] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] my_ip = 10.180.1.21 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.174289] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] network_allocate_retries = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.174469] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.174639] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] osapi_compute_listen = 0.0.0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.174817] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] osapi_compute_listen_port = 8774 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.174981] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] osapi_compute_unique_server_name_scope = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.175165] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] osapi_compute_workers = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.175332] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] password_length = 12 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.175501] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] periodic_enable = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.175665] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] periodic_fuzzy_delay = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.175836] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] pointer_model = usbtablet {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176026] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] preallocate_images = none {{(pid=62914) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176195] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] publish_errors = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176327] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] pybasedir = /opt/stack/nova {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176486] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ram_allocation_ratio = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176648] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] rate_limit_burst = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176814] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] rate_limit_except_level = CRITICAL {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.176981] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] rate_limit_interval = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.177155] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] reboot_timeout = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.177316] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] reclaim_instance_interval = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.177474] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] record = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.177643] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] reimage_timeout_per_gb = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.177808] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] report_interval = 120 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.177977] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] rescue_timeout = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.178165] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] reserved_host_cpus = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.178330] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] reserved_host_disk_mb = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.178490] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c 
None None] reserved_host_memory_mb = 512 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.178654] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] reserved_huge_pages = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.178816] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] resize_confirm_window = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.178977] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] resize_fs_using_block_device = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.179151] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] resume_guests_state_on_host_boot = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.179319] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.179483] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] rpc_response_timeout = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.179647] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] run_external_periodic_tasks = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.179813] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] running_deleted_instance_action = reap {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.179975] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] running_deleted_instance_poll_interval = 1800 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.180151] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] running_deleted_instance_timeout = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.180313] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler_instance_sync_interval = 120 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.180481] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_down_time = 720 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.180651] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] servicegroup_driver = db {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.180808] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] shell_completion = None {{(pid=62914) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.180970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] shelved_offload_time = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.181144] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] shelved_poll_interval = 3600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.181312] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] shutdown_timeout = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.181473] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] source_is_ipv6 = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.181632] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ssl_only = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.181886] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.182069] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] sync_power_state_interval = 600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.182234] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] sync_power_state_pool_size = 1000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.182401] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] syslog_log_facility = LOG_USER {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.182559] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] tempdir = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.182718] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] timeout_nbd = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.182884] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] transport_url = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.183060] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] update_resources_interval = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.183225] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_cow_images = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.183387] env[62914]: DEBUG oslo_service.service [None 
req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_eventlog = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.183548] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_journal = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.183708] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_json = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.183868] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_rootwrap_daemon = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.184037] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_stderr = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.184205] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] use_syslog = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.184362] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vcpu_pin_set = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.184528] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plugging_is_fatal = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.184694] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plugging_timeout = 300 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.184870] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] virt_mkfs = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.185037] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] volume_usage_poll_interval = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.185206] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] watch_log_file = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.185375] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] web = /usr/share/spice-html5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 515.185560] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_concurrency.disable_process_locking = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.185850] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.186046] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.186222] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.186395] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.186566] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.186731] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.186946] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.auth_strategy = keystone {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.187107] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.compute_link_prefix = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.187287] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.187460] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.dhcp_domain = novalocal {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.187633] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.enable_instance_password = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.187799] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.glance_link_prefix = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.187968] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.188156] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.188324] env[62914]: 
DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.instance_list_per_project_cells = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.188489] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.list_records_by_skipping_down_cells = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.188655] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.local_metadata_per_cell = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.188824] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.max_limit = 1000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.188996] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.metadata_cache_expiration = 15 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.189189] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.neutron_default_tenant_id = default {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.189363] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.response_validation = warn {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.189532] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.use_neutron_default_nets = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.189710] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.189871] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.190056] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.190240] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.190417] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_dynamic_targets = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.190584] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_jsonfile_path = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.190769] env[62914]: 
DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.190968] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.backend = dogpile.cache.memcached {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.191160] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.backend_argument = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.191329] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.config_prefix = cache.oslo {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.191503] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.dead_timeout = 60.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.191830] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.debug_cache_backend = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.192094] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.enable_retry_client = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.192284] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.enable_socket_keepalive = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.192467] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.enabled = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.192643] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.enforce_fips_mode = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.192814] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.expiration_time = 600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.192985] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.hashclient_retry_attempts = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.193179] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.hashclient_retry_delay = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.193375] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_dead_retry = 300 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.193550] env[62914]: DEBUG oslo_service.service [None 
req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_password = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.193720] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.193887] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.194068] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_pool_maxsize = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.194239] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.194405] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_sasl_enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.194587] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.194795] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_socket_timeout = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.194921] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.memcache_username = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.195107] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.proxies = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.195278] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_db = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.195439] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_password = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.195613] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_sentinel_service_name = mymaster {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.195796] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.195971] env[62914]: DEBUG oslo_service.service [None 
req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_server = localhost:6379 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.196159] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_socket_timeout = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.196324] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.redis_username = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.196487] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.retry_attempts = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.196655] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.retry_delay = 0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.196818] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.socket_keepalive_count = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.197016] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.socket_keepalive_idle = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.197203] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.socket_keepalive_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.197376] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.tls_allowed_ciphers = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.197566] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.tls_cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.197731] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.tls_certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.197910] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.tls_enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.198157] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cache.tls_keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.198348] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.198527] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.auth_type = password {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.198693] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.198874] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.catalog_info = volumev3::publicURL {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.199050] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.199235] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.199408] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.cross_az_attach = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.199577] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.debug = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.199742] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.endpoint_template = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.199911] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.http_retries = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.200093] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.200257] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.200429] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.os_region_name = RegionOne {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.200597] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.200760] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cinder.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.200935] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.201114] env[62914]: DEBUG 
oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.cpu_dedicated_set = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.201276] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.cpu_shared_set = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.201446] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.image_type_exclude_list = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.201611] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.201777] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.max_concurrent_disk_ops = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.201942] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.max_disk_devices_to_attach = -1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.202122] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.202299] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.202462] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.resource_provider_association_refresh = 300 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.202629] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.202798] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.shutdown_retry_interval = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.202983] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.203183] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] conductor.workers = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.203367] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] console.allowed_origins = [] {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.203534] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] console.ssl_ciphers = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.203704] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] console.ssl_minimum_version = default {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.203875] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] consoleauth.enforce_session_timeout = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.204058] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] consoleauth.token_ttl = 600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.204234] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.204394] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.204564] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.204727] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.204894] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.connect_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.205076] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.205249] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.205445] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.205619] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.205782] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.205943] env[62914]: DEBUG 
oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.region_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.206120] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.206283] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.206452] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.service_type = accelerator {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.206617] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.206779] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.206973] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.207156] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.207344] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.207511] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] cyborg.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.207694] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.backend = sqlalchemy {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.207900] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.connection = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.208046] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.connection_debug = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.208222] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.connection_parameters = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.208399] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] 
database.connection_recycle_time = 3600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.208566] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.connection_trace = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.208733] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.db_inc_retry_interval = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.208899] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.db_max_retries = 20 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.209083] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.db_max_retry_interval = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.209275] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.db_retry_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.209457] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.max_overflow = 50 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.209623] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.max_pool_size = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.209788] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.max_retries = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.209958] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.210134] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.mysql_wsrep_sync_wait = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.210294] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.pool_timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.210456] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.retry_interval = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.210614] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.slave_connection = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.210777] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.sqlite_synchronous = True {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.210939] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] database.use_db_reconnect = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.211140] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.backend = sqlalchemy {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.211311] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.connection = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.211478] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.connection_debug = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.211647] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.connection_parameters = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.211813] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.connection_recycle_time = 3600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.211977] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.connection_trace = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.212158] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.db_inc_retry_interval = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.212323] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.db_max_retries = 20 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.212485] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.db_max_retry_interval = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.212647] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.db_retry_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.212809] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.max_overflow = 50 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.212970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.max_pool_size = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.213155] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.max_retries = 10 {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.213325] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.213482] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.213643] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.pool_timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.213808] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.retry_interval = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.213970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.slave_connection = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.214145] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] api_database.sqlite_synchronous = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.214323] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] devices.enabled_mdev_types = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.214501] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.214675] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ephemeral_storage_encryption.default_format = luks {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.214843] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ephemeral_storage_encryption.enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.215017] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.215200] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.api_servers = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.215368] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.215530] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.certfile = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.215693] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.215853] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.216027] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.connect_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.216199] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.debug = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.216369] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.default_trusted_certificate_ids = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.216533] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.enable_certificate_validation = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.216697] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.enable_rbd_download = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.216857] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.217073] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.217246] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.217431] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.217602] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.217768] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.num_retries = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.217938] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.rbd_ceph_conf = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.218152] env[62914]: DEBUG 
oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.rbd_connect_timeout = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.218330] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.rbd_pool = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.218498] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.rbd_user = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.218661] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.region_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.218824] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.218986] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.219173] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.service_type = image {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.219338] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.219501] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.219663] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.219824] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.220028] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.220242] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.verify_glance_signatures = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.220415] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] glance.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.220587] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] guestfs.debug = False 
{{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.220759] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] mks.enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.221151] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.221376] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] image_cache.manager_interval = 2400 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.221560] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] image_cache.precache_concurrency = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.221737] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] image_cache.remove_unused_base_images = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.221908] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.222094] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.222278] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] image_cache.subdirectory_name = _base {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.222461] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.api_max_retries = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.222628] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.api_retry_interval = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.222794] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.222961] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.auth_type = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.223138] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.223305] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.certfile = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.223471] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.223638] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.conductor_group = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.223803] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.223966] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.connect_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.224144] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.224313] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.224473] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.224633] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.224794] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.224961] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.peer_list = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.225139] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.region_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.225302] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.225467] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.serial_console_state_timeout = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.225630] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.225801] env[62914]: DEBUG 
oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.service_type = baremetal {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.225965] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.shard = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.226146] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.226309] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.226471] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.226632] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.226813] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.227015] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ironic.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.227213] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.227394] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] key_manager.fixed_key = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.227578] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.227743] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.barbican_api_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.227910] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.barbican_endpoint = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.228102] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.barbican_endpoint_type = public {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.228266] env[62914]: DEBUG oslo_service.service [None 
req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.barbican_region_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.228427] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.228588] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.228754] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.228918] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.229093] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.229285] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.number_of_retries = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.229464] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.retry_delay = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.229631] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.send_service_user_token = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.229797] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.229959] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.230141] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.verify_ssl = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.230305] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican.verify_ssl_path = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.230474] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.230639] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.auth_type = None {{(pid=62914) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.230800] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.230961] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.231141] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.231308] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.231468] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.231633] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.231792] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] barbican_service_user.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.231962] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.approle_role_id = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.232139] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.approle_secret_id = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.232314] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.kv_mountpoint = secret {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.232474] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.kv_path = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.232643] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.kv_version = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.232807] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.namespace = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.232970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.root_token_id = **** {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.233149] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.ssl_ca_crt_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.233341] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.timeout = 60.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.233530] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.use_ssl = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.233705] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.233883] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.234066] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.auth_type = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.234233] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.234394] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.234560] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.234720] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.234884] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.connect_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.235060] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.235227] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.235387] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.235546] env[62914]: DEBUG oslo_service.service 
[None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.235706] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.235866] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.region_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.236036] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.236205] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.236377] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.service_type = identity {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.236543] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.236703] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.236869] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.237075] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.237269] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.237435] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] keystone.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.237640] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.connection_uri = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.237805] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_mode = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.237977] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_model_extra_flags = 
[] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.238163] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_models = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.238336] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_power_governor_high = performance {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.238504] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_power_governor_low = powersave {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.238667] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_power_management = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.238839] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.239008] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.device_detach_attempts = 8 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.239184] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.device_detach_timeout = 20 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.239391] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.disk_cachemodes = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.239560] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.disk_prefix = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.239729] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.enabled_perf_events = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.239896] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.file_backed_memory = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.240087] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.gid_maps = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.240256] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.hw_disk_discard = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.240419] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.hw_machine_type = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.240592] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_rbd_ceph_conf = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.240757] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.240921] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.241108] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_rbd_glance_store_name = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.241284] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_rbd_pool = rbd {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.241455] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_type = default {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.241615] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.images_volume_group = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.241778] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.inject_key = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.241939] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.inject_partition = -2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.242113] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.inject_password = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.242278] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.iscsi_iface = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.242441] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.iser_use_multipath = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.242605] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_bandwidth = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.242765] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.242929] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_downtime = 500 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.243105] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.243274] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.243462] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_inbound_addr = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.243627] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.243789] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_permit_post_copy = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.243949] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_scheme = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.244138] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_timeout_action = abort {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.244310] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_tunnelled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.244476] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_uri = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.244640] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.live_migration_with_native_tls = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.244799] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.max_queues = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.244965] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.245223] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.245420] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.nfs_mount_options = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.246148] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.246344] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.246520] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.num_iser_scan_tries = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.246687] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.num_memory_encrypted_guests = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.246855] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.247066] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.num_pcie_ports = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.247252] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.num_volume_scan_tries = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.247427] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.pmem_namespaces = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.247592] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.quobyte_client_cfg = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.247889] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.248089] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rbd_connect_timeout = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.248263] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.248431] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.248596] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rbd_secret_uuid = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.248757] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rbd_user = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.248922] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.249108] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.remote_filesystem_transport = ssh {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.249301] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rescue_image_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.249479] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rescue_kernel_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.249639] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rescue_ramdisk_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.249811] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.249974] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.rx_queue_size = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.250160] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.smbfs_mount_options = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.250447] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.250621] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.snapshot_compression = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.250785] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.snapshot_image_format = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.251012] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.251195] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.sparse_logical_volumes = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.251382] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.swtpm_enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.251560] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.swtpm_group = tss {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.251729] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.swtpm_user = tss {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.251899] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.sysinfo_serial = unique {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.252083] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.tb_cache_size = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.252246] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.tx_queue_size = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.252412] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.uid_maps = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.252578] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.use_virtio_for_bridges = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.252747] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.virt_type = kvm {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.252917] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.volume_clear = zero {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.253092] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.volume_clear_size = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.253263] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.volume_use_multipath = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.253468] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_cache_path = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.253675] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.253850] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_mount_group = qemu {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.254029] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_mount_opts = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.254209] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.254490] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.254667] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.vzstorage_mount_user = stack {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.254838] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.255026] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.255208] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.auth_type = password {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.255410] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.255615] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.255791] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.255954] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.256145] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.connect_retry_delay = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.256322] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.default_floating_pool = public {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.256484] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.256649] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.extension_sync_interval = 600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.256811] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.http_retries = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.257027] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.257209] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.257380] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.257581] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.257803] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.258018] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.ovs_bridge = br-int {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.258209] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.physnets = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.258385] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.region_name = RegionOne {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.258551] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.258720] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.service_metadata_proxy = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.258885] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.259072] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.service_type = network {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.259244] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.259405] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.259563] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.259722] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.259906] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.260086] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] neutron.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.260268] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] notifications.bdms_in_notifications = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.260449] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] notifications.default_level = INFO {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.260626] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] notifications.notification_format = unversioned {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.260795] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] notifications.notify_on_state_change = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.260974] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.261175] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] pci.alias = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.261354] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] pci.device_spec = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.261523] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] pci.report_in_placement = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.261700] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.261878] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.auth_type = password {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.262062] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.262229] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.262392] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.262558] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.262720] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.262885] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.connect_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.263059] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.default_domain_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.263225] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.default_domain_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.263415] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.domain_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.263589] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.domain_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.263753] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c 
None None] placement.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.263919] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.264095] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.264262] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.264422] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.264593] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.password = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.264765] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.project_domain_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.264926] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.project_domain_name = Default {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.265110] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.project_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.265288] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.project_name = service {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.265521] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.region_name = RegionOne {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.265713] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.265880] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.266069] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.service_type = placement {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.266241] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.split_loggers = False {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.266403] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.266570] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.266732] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.system_scope = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.266907] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.267095] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.trust_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.267264] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.user_domain_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.267435] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.user_domain_name = Default {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.267600] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.user_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.267777] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.username = nova {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.267997] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.268168] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] placement.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.268352] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.cores = 20 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.268517] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.count_usage_from_placement = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.268692] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
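The long run of `log_opt_values` DEBUG lines in this log is oslo.config dumping every registered option (group.name = value) once at service startup, with options registered as secret printed as `****`. A minimal sketch of what produces that output, assuming only stock oslo.config; the option names and defaults below are borrowed from the [quota] and [placement] groups seen in this log purely for illustration and are not Nova's actual option definitions:

```python
# Minimal sketch: reproduce the "group.option = value" DEBUG dump seen above
# using oslo.config's log_opt_values(). Illustrative options only.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

quota_opts = [
    cfg.IntOpt('cores', default=20, help='Cores quota (mirrors quota.cores = 20).'),
    cfg.IntOpt('instances', default=10, help='Instance quota (mirrors quota.instances = 10).'),
]
placement_opts = [
    cfg.StrOpt('username', default='nova'),
    cfg.StrOpt('password', secret=True),  # secret=True is why the log shows ****
]

conf = cfg.ConfigOpts()
conf.register_opts(quota_opts, group='quota')
conf.register_opts(placement_opts, group='placement')
conf(args=[], project='nova')  # parse (empty) CLI args / config files

# Emits one DEBUG line per registered option, e.g. "quota.cores = 20" and
# "placement.password = ****", matching the format of the entries in this log.
conf.log_opt_values(LOG, logging.DEBUG)
```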
[ 515.268875] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.injected_file_content_bytes = 10240 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.269056] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.injected_file_path_length = 255 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.269242] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.injected_files = 5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.269415] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.instances = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.269587] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.key_pairs = 100 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.269759] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.metadata_items = 128 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.269932] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.ram = 51200 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.270114] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.recheck_quota = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.270287] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.server_group_members = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.270455] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] quota.server_groups = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.270629] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.270795] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.270966] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.image_metadata_prefilter = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.271140] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.271307] env[62914]: DEBUG oslo_service.service 
[None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.max_attempts = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.271471] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.max_placement_results = 1000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.271644] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.271808] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.query_placement_for_image_type_support = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.271970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.272161] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] scheduler.workers = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.272337] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.272507] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.272687] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.272858] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.273037] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.273208] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.273375] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.273566] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.273738] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.host_subset_size = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.273906] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.274078] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.274245] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.274410] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.isolated_hosts = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.274581] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.isolated_images = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.274746] env[62914]: DEBUG oslo_service.service [None 
req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.274915] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.275099] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.275395] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.pci_in_placement = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.275446] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.275567] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.275728] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.275889] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.276076] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.276241] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.276404] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.track_instance_changes = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.276582] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.276756] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metrics.required = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.276942] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metrics.weight_multiplier = 1.0 
{{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.277138] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.277311] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] metrics.weight_setting = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.277636] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.277817] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] serial_console.enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.278032] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] serial_console.port_range = 10000:20000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.278244] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.278426] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.278605] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] serial_console.serialproxy_port = 6083 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.278779] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.278956] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.auth_type = password {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.279136] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.279301] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.279467] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.279631] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.insecure = False {{(pid=62914) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.279794] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.279968] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.send_service_user_token = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.280147] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.280312] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] service_user.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.280488] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.agent_enabled = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.280653] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.280992] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.281264] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.281454] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.html5proxy_port = 6082 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.281620] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.image_compression = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.281785] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.jpeg_compression = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.281951] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.playback_compression = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.282145] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.require_secure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.282320] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.server_listen = 127.0.0.1 {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.282491] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.282660] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.streaming_mode = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.282821] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] spice.zlib_compression = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.282997] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] upgrade_levels.baseapi = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.283187] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] upgrade_levels.compute = auto {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.283357] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] upgrade_levels.conductor = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.283521] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] upgrade_levels.scheduler = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.283690] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.auth_section = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.283854] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.auth_type = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284024] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284191] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284355] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284517] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284675] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.keyfile = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284837] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.284995] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vendordata_dynamic_auth.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.285188] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.api_retry_count = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.285354] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.ca_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.285525] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.cache_prefix = devstack-image-cache {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.285692] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.cluster_name = testcl1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.285858] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.connection_pool_size = 10 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.286035] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.console_delay_seconds = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.286213] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.datastore_regex = ^datastore.* {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.286435] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.286608] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.host_password = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.286781] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.host_port = 443 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.286971] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.host_username = administrator@vsphere.local {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.287168] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.insecure = True {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.287334] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.integration_bridge = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.287499] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.maximum_objects = 100 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.287660] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.pbm_default_policy = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.287825] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.pbm_enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.287999] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.pbm_wsdl_location = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.288188] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.288355] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.serial_port_proxy_uri = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.288515] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.serial_port_service_uri = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.288685] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.task_poll_interval = 0.5 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.288853] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.use_linked_clone = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.289037] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.vnc_keymap = en-us {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.289211] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.vnc_port = 5900 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.289375] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vmware.vnc_port_total = 10000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.289564] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.auth_schemes = ['none'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.289741] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.290086] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.290281] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.290466] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.novncproxy_port = 6080 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.290663] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.server_listen = 127.0.0.1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.290841] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.291012] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.vencrypt_ca_certs = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.291187] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.vencrypt_client_cert = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.291349] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vnc.vencrypt_client_key = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.291532] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.291700] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.disable_deep_image_inspection = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.291866] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.292042] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.292214] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.292380] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.disable_rootwrap = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.292545] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.enable_numa_live_migration = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.292709] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.292870] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.293045] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.293216] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.libvirt_disable_apic = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.293379] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.293545] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.293708] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.293870] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.294045] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.294216] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.294381] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.294546] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.294711] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.294909] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.295116] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.295295] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.client_socket_timeout = 900 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.295465] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.default_pool_size = 1000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.295636] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.keep_alive = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.295807] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.max_header_line = 16384 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.295975] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.secure_proxy_ssl_header = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.296155] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.ssl_ca_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.296320] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.ssl_cert_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.296482] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.ssl_key_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.296648] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.tcp_keepidle = 600 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.296824] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.297038] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] zvm.ca_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.297219] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] zvm.cloud_connector_url = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.297532] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.297709] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] zvm.reachable_timeout = 300 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.297907] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.enforce_new_defaults = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.298337] env[62914]: WARNING oslo_config.cfg [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
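The DEBUG "group.option = value" lines above and below are the per-option dump that oslo.config emits at service start-up, and the WARNING immediately above is oslo.config flagging that a deprecated-for-removal option was set. A minimal sketch of how such a dump is produced with ConfigOpts.log_opt_values(); the standalone script, the option names, and the deprecation reason below are illustrative assumptions, not code taken from Nova (the real [oslo_policy] options are registered by the oslo.policy library):

    import logging

    from oslo_config import cfg

    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF

    # Illustrative options only, modelled on the [oslo_policy] values seen in the log.
    opts = [
        cfg.BoolOpt('enforce_new_defaults', default=True),
        # Evaluating a deprecated-for-removal option that has been set is what makes
        # oslo.config log a "Deprecated: Option ... is deprecated for removal" warning
        # like the one above.
        cfg.BoolOpt('enforce_scope',
                    default=True,
                    deprecated_for_removal=True,
                    deprecated_reason='Scope checks are always enforced.'),
    ]
    CONF.register_opts(opts, group='oslo_policy')

    if __name__ == '__main__':
        logging.basicConfig(level=logging.DEBUG)
        # Parse config files / CLI arguments (none passed here).
        CONF([], project='nova')
        # Walk every registered group and log "group.option = value" at DEBUG level,
        # masking options declared with secret=True (which is why host_password and
        # hmac_keys appear as **** in the dump above).
        CONF.log_opt_values(LOG, logging.DEBUG)

The trailing "{{(pid=...) log_opt_values .../oslo_config/cfg.py:2826}}" suffix on each entry is not produced by this call itself; it is appended by the oslo.log debug format suffix configured for the service, which records the emitting function and source line.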
[ 515.298525] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.enforce_scope = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.298704] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.policy_default_rule = default {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.298894] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.299091] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.policy_file = policy.yaml {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.299280] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.299446] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.299608] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.299771] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.299936] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.300125] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.300307] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.300485] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.connection_string = messaging:// {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.300659] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.enabled = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.300830] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.es_doc_type = notification 
{{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.300997] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.es_scroll_size = 10000 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.301184] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.es_scroll_time = 2m {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.301350] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.filter_error_trace = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.301521] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.hmac_keys = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.301688] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.sentinel_service_name = mymaster {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.301858] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.socket_timeout = 0.1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.302031] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.trace_requests = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.302208] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler.trace_sqlalchemy = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.302392] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler_jaeger.process_tags = {} {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.302555] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler_jaeger.service_name_prefix = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.302721] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] profiler_otlp.service_name_prefix = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.302890] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] remote_debug.host = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.303063] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] remote_debug.port = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.303252] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.303417] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.303581] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.303742] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.303905] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.304078] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.304245] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.304408] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.304571] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.304738] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.304935] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.305136] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.305310] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.305484] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.305654] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.305822] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.305988] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.306185] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.306351] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.306516] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.306682] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.306848] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.307054] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.307237] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.307403] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.307567] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.307732] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.307896] env[62914]: DEBUG oslo_service.service [None 
req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.308077] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.308252] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.ssl = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.308428] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.308599] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.308764] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.308937] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.309351] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.ssl_version = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.309351] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.309493] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.309666] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_notifications.retry = -1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.309854] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.310054] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_messaging_notifications.transport_url = **** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.310237] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.auth_section = None {{(pid=62914) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.310404] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.auth_type = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.310567] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.cafile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.310729] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.certfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.310893] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.collect_timing = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311506] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.connect_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311506] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.connect_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311506] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.endpoint_id = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311651] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.endpoint_override = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311685] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.insecure = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311838] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.keyfile = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.311996] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.max_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.312170] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.min_version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.312329] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.region_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.312492] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.retriable_status_codes = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.312651] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.service_name = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.312809] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.service_type = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.312973] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.split_loggers = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.313146] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.status_code_retries = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.313310] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.status_code_retry_delay = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.313468] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.timeout = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.313628] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.valid_interfaces = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.313784] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_limit.version = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.313950] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_reports.file_event_handler = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.314139] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.314301] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] oslo_reports.log_dir = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.314475] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.314637] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.314812] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.315046] 
env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.315230] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.315394] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.315578] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.315733] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_ovs_privileged.group = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.315895] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.316075] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.316243] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.316405] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] vif_plug_ovs_privileged.user = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.316577] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.flat_interface = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.316765] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.316970] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.317170] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.317348] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.317524] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.317696] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.317861] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.318063] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.318243] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.isolate_vif = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.318417] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.318587] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.318759] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.318932] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.ovsdb_interface = native {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.319110] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_vif_ovs.per_port_bridge = False {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.319282] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_brick.lock_path = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.319451] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.319614] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.319787] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None 
None] privsep_osbrick.capabilities = [21] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.319949] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] privsep_osbrick.group = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.320123] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] privsep_osbrick.helper_command = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.320292] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.320455] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.320615] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] privsep_osbrick.user = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.320790] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.320954] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] nova_sys_admin.group = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.321227] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] nova_sys_admin.helper_command = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.321288] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.321449] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.321606] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] nova_sys_admin.user = None {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 515.321737] env[62914]: DEBUG oslo_service.service [None req-6a364e8c-6601-44b4-b8e6-66fbd68c768c None None] ******************************************************************************** {{(pid=62914) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 515.322265] env[62914]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 515.826191] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Getting list of instances from cluster (obj){ [ 515.826191] env[62914]: value = "domain-c8" [ 515.826191] env[62914]: 
_type = "ClusterComputeResource" [ 515.826191] env[62914]: } {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 515.827418] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74749d8-b343-4144-90a2-8ecb9704c7d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.836247] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Got total of 0 instances {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 515.836802] env[62914]: WARNING nova.virt.vmwareapi.driver [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 515.837339] env[62914]: INFO nova.virt.node [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Generated node identity f2f7a014-852b-4b37-9610-c5761f4b0175 [ 515.837580] env[62914]: INFO nova.virt.node [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Wrote node identity f2f7a014-852b-4b37-9610-c5761f4b0175 to /opt/stack/data/n-cpu-1/compute_id [ 516.340571] env[62914]: WARNING nova.compute.manager [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Compute nodes ['f2f7a014-852b-4b37-9610-c5761f4b0175'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 517.346786] env[62914]: INFO nova.compute.manager [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 518.352413] env[62914]: WARNING nova.compute.manager [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 518.352783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.352892] env[62914]: DEBUG oslo_concurrency.lockutils [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.353037] env[62914]: DEBUG oslo_concurrency.lockutils [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.353197] env[62914]: DEBUG nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 518.354141] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f15df73-838c-4cea-a4ef-676c03ae0171 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.363660] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d484098e-e102-4792-9601-2182ad2e5917 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.377799] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabf8bfc-bd5b-41b7-aa8c-e18a363d5464 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.385475] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af3b476-7c49-4733-bd6c-0a1711407118 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.415302] env[62914]: DEBUG nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180594MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 518.415463] env[62914]: DEBUG oslo_concurrency.lockutils [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.415667] env[62914]: DEBUG oslo_concurrency.lockutils [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.918359] env[62914]: WARNING 
nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] No compute node record for cpu-1:f2f7a014-852b-4b37-9610-c5761f4b0175: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host f2f7a014-852b-4b37-9610-c5761f4b0175 could not be found. [ 519.425151] env[62914]: INFO nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: f2f7a014-852b-4b37-9610-c5761f4b0175 [ 520.932778] env[62914]: DEBUG nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 520.933331] env[62914]: DEBUG nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 521.090728] env[62914]: INFO nova.scheduler.client.report [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] [req-280e9cc3-6a09-4995-a562-b966a64dd065] Created resource provider record via placement API for resource provider with UUID f2f7a014-852b-4b37-9610-c5761f4b0175 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 521.105629] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae782097-a799-4687-a8be-1d05731eeab4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.114285] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da885f4a-cd55-4526-a65f-d5abd2bffda0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.144415] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8de521-4a1d-446c-bf66-1165bd0622f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.152593] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1d5b2e-80fd-48a3-8b99-859b3b2912ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.166483] env[62914]: DEBUG nova.compute.provider_tree [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 521.702782] env[62914]: DEBUG nova.scheduler.client.report [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Updated inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with generation 0 in Placement from set_inventory_for_provider using 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 521.703079] env[62914]: DEBUG nova.compute.provider_tree [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 0 to 1 during operation: update_inventory {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 521.703207] env[62914]: DEBUG nova.compute.provider_tree [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 521.759714] env[62914]: DEBUG nova.compute.provider_tree [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 1 to 2 during operation: update_traits {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 522.264558] env[62914]: DEBUG nova.compute.resource_tracker [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 522.264950] env[62914]: DEBUG oslo_concurrency.lockutils [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.849s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.264950] env[62914]: DEBUG nova.service [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Creating RPC server for service compute {{(pid=62914) start /opt/stack/nova/nova/service.py:186}} [ 522.279022] env[62914]: DEBUG nova.service [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] Join ServiceGroup membership for this service compute {{(pid=62914) start /opt/stack/nova/nova/service.py:203}} [ 522.279228] env[62914]: DEBUG nova.servicegroup.drivers.db [None req-994735ca-b147-4008-b452-f95816c5fcd4 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62914) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 548.283635] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_power_states {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.789549] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Getting list of instances from cluster (obj){ [ 548.789549] 
env[62914]: value = "domain-c8" [ 548.789549] env[62914]: _type = "ClusterComputeResource" [ 548.789549] env[62914]: } {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 548.790774] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6e2608-a56a-4f43-98e9-bb084a934047 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.802755] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Got total of 0 instances {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 548.802993] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.803331] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Getting list of instances from cluster (obj){ [ 548.803331] env[62914]: value = "domain-c8" [ 548.803331] env[62914]: _type = "ClusterComputeResource" [ 548.803331] env[62914]: } {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 548.804246] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3fdbd1-fae1-45c7-a9c7-11b719c82af0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.813787] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Got total of 0 instances {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 563.342211] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "43edad1f-cff0-4d3c-a721-98277d1cddc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.342211] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.846370] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 564.400953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.401252] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.403798] env[62914]: INFO nova.compute.claims [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.472175] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.472175] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.980712] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 564.993250] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "52097338-887e-4c79-8413-abfd7ea26c96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.993517] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "52097338-887e-4c79-8413-abfd7ea26c96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.408322] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "a4fca617-da38-4913-b2c8-a2921da6db56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.408673] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "a4fca617-da38-4913-b2c8-a2921da6db56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.496411] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 565.517455] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.557821] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e729ec-4865-40c8-80a5-552d0940674e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.570532] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5414f2-ee49-44b2-9b28-3cd26c601f34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.619174] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1388c64-ab02-48f4-9e70-964d5fb0e2f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.632480] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01b8279-299d-44bb-8882-b63263785c82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.651125] env[62914]: DEBUG nova.compute.provider_tree [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.911869] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 566.030858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.156190] env[62914]: DEBUG nova.scheduler.client.report [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.337074] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.337490] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.448195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.667419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.265s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.668280] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 566.674146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.159s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.676934] env[62914]: INFO nova.compute.claims [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.840648] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 567.184254] env[62914]: DEBUG nova.compute.utils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.188415] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 567.188736] env[62914]: DEBUG nova.network.neutron [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.387708] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.602748] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "db31a794-3928-41bb-afd8-14fae9357654" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.603028] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "db31a794-3928-41bb-afd8-14fae9357654" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.696287] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 567.892263] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34358985-766c-4a40-8672-38a2ecbc9be3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.903648] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c48be16-7de6-4b67-998f-48005100352d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.947518] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9cfc29-b84b-4de3-9006-5341afea3f50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.960353] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f181251-d4c1-439f-b705-f2a18d6f51bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.976506] env[62914]: DEBUG nova.compute.provider_tree [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.015069] env[62914]: DEBUG nova.policy [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '485c72c816cf49ef851ce5ed15d0aadc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e7977afd02c4940b9ffaefb78f5de34', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 568.106194] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 568.481539] env[62914]: DEBUG nova.scheduler.client.report [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.579313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "aede8da7-8bf2-4963-b08b-6e06007614a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.579790] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.658656] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.710166] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 568.751234] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 568.751482] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 568.751634] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.751808] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 568.751950] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 568.753091] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 568.753367] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 568.753539] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 568.755375] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 568.755572] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 568.756372] env[62914]: DEBUG nova.virt.hardware [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 568.756738] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc0fcd1-e18d-4085-849e-1857681d0a21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.769586] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52591cca-26c1-4614-925d-484b02aea7f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.797144] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac229a2-1d18-4f09-9675-4becc04613a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.989371] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.989371] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 568.993744] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.963s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.000019] env[62914]: INFO nova.compute.claims [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.083323] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 569.503538] env[62914]: DEBUG nova.compute.utils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.510224] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 569.510592] env[62914]: DEBUG nova.network.neutron [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.614402] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.703642] env[62914]: DEBUG nova.policy [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a426c3bd7944e69bacce135b47629f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e460060822e4eda931ae402635e9eb6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 569.952776] env[62914]: DEBUG nova.network.neutron [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Successfully created port: 90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.013163] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 570.227354] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c68d73-1abd-4ee0-b82e-23b0c8cfd241 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.241615] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f1c42f-f654-4c4e-bcbc-59d5555ecd00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.276840] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de66e4ba-b830-4639-829e-7b63d904daf8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.286213] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40837da8-fa47-4b60-b18e-f0351d69c067 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.309839] env[62914]: DEBUG nova.compute.provider_tree [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.579562] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.579820] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.579982] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 570.580370] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Rebuilding the list of instances to heal {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 570.817380] env[62914]: DEBUG nova.scheduler.client.report [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.021815] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 
tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 571.052741] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.053062] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.053424] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.053500] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.053644] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.053892] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.054384] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.054548] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 
tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.054964] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.055179] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.055452] env[62914]: DEBUG nova.virt.hardware [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.057086] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e04f392-b901-4b92-8dc8-d2aa2fc7ba44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.068339] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b0aa5f-408c-4dc2-9f19-446d72950800 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.092468] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 571.092633] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 571.092762] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 571.093181] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Didn't find any instances for network info cache update. 
{{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10090}} [ 571.093181] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.093740] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.093740] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.094059] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.094286] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.094478] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.094645] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 571.094792] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 571.212719] env[62914]: DEBUG nova.network.neutron [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Successfully created port: 59265cbb-d823-43dc-a07d-d850de95a7d8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.323497] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.324033] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 571.327252] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.879s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.328766] env[62914]: INFO nova.compute.claims [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.598859] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.834311] env[62914]: DEBUG nova.compute.utils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.847737] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 571.848080] env[62914]: DEBUG nova.network.neutron [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 572.187915] env[62914]: DEBUG nova.policy [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6dc3dcfc0de546dcb49d7d88d6432a1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a48cfbcf1a492cbbca942d7ddb570d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 572.343279] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 572.582042] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f2f5a0-62b1-4b19-8f8e-cd583eec165b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.591284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f79006-267e-4e4f-b023-1d4e3f31c23a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.640968] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d18306-9707-4fbd-bee3-d79c44cb9d6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.656355] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baea44a2-d843-4aab-9ee8-c2f09ac8e4ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.683093] env[62914]: DEBUG nova.compute.provider_tree [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.123933] env[62914]: DEBUG nova.network.neutron [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Successfully created port: a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.190455] env[62914]: DEBUG nova.scheduler.client.report [None 
req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.359324] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 573.414172] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 573.415059] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 573.415420] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.415793] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 573.415793] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.415881] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 
tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 573.418159] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 573.418159] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 573.418159] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 573.418159] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 573.418159] env[62914]: DEBUG nova.virt.hardware [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 573.419382] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1ea8cb-5afe-4792-8945-86bf96e0b9d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.429489] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdb97e5-686d-4e3c-be09-25d992161674 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.697386] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.698019] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 573.701033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.313s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.702550] env[62914]: INFO nova.compute.claims [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.209234] env[62914]: DEBUG nova.compute.utils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 574.212853] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 574.212853] env[62914]: DEBUG nova.network.neutron [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 574.301836] env[62914]: DEBUG nova.policy [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5afd9edccc754bccad878e9bf17f882c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0542b5c4f80141fbb4f129b3451edc4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 574.474088] env[62914]: DEBUG nova.network.neutron [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Successfully updated port: 90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 574.726456] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 574.787147] env[62914]: DEBUG nova.network.neutron [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Successfully updated port: 59265cbb-d823-43dc-a07d-d850de95a7d8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 574.918567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be3fe4d-2890-47b1-8052-28dd12fc0e26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.928567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9781503f-d73d-4b47-9862-85e7563bc12e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.965918] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54988bbf-7e3d-4f63-b785-4238ff1228fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.975635] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b763d2-517d-4e95-b5cd-7e37d844fdc0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.984950] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "refresh_cache-43edad1f-cff0-4d3c-a721-98277d1cddc2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.984950] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquired lock "refresh_cache-43edad1f-cff0-4d3c-a721-98277d1cddc2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.984950] env[62914]: DEBUG nova.network.neutron [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.000787] env[62914]: DEBUG nova.compute.provider_tree [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.056687] env[62914]: DEBUG nova.network.neutron [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Successfully created port: 917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 575.292686] env[62914]: DEBUG 
oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.294123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.294478] env[62914]: DEBUG nova.network.neutron [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.504266] env[62914]: DEBUG nova.scheduler.client.report [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 575.681159] env[62914]: DEBUG nova.network.neutron [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.750015] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 575.795424] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.795424] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.795424] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.795734] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 575.797229] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.797571] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.802028] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.802028] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 575.802028] env[62914]: DEBUG 
nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.802028] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.802028] env[62914]: DEBUG nova.virt.hardware [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.810489] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caab2b5-c961-4a6c-af34-edd1a06a96ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.825944] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e429fd2d-c3bc-4705-8135-77a62ba365e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.010247] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.012391] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 576.016412] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.358s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.017558] env[62914]: INFO nova.compute.claims [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.118026] env[62914]: DEBUG nova.network.neutron [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.529033] env[62914]: DEBUG nova.compute.utils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 576.535264] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 576.535655] env[62914]: DEBUG nova.network.neutron [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 576.604678] env[62914]: DEBUG nova.compute.manager [req-5ff5d161-a988-4ac7-9d71-d8318564c8a9 req-f8b51e93-5fd2-4bbe-a969-7176098fd76f service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Received event network-vif-plugged-90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 576.604678] env[62914]: DEBUG oslo_concurrency.lockutils [req-5ff5d161-a988-4ac7-9d71-d8318564c8a9 req-f8b51e93-5fd2-4bbe-a969-7176098fd76f service nova] Acquiring lock "43edad1f-cff0-4d3c-a721-98277d1cddc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.604678] env[62914]: DEBUG oslo_concurrency.lockutils [req-5ff5d161-a988-4ac7-9d71-d8318564c8a9 req-f8b51e93-5fd2-4bbe-a969-7176098fd76f service nova] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.604678] env[62914]: DEBUG oslo_concurrency.lockutils [req-5ff5d161-a988-4ac7-9d71-d8318564c8a9 req-f8b51e93-5fd2-4bbe-a969-7176098fd76f service nova] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.604678] env[62914]: DEBUG nova.compute.manager [req-5ff5d161-a988-4ac7-9d71-d8318564c8a9 req-f8b51e93-5fd2-4bbe-a969-7176098fd76f service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] No waiting events found dispatching network-vif-plugged-90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 576.604934] env[62914]: WARNING nova.compute.manager [req-5ff5d161-a988-4ac7-9d71-d8318564c8a9 req-f8b51e93-5fd2-4bbe-a969-7176098fd76f service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Received unexpected event network-vif-plugged-90a32b17-6fbf-4efa-99d6-610bd414847b for instance with vm_state building and task_state spawning. 
[ 576.829216] env[62914]: DEBUG nova.policy [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95cfa6169fe14e17a1918cfa6c0fbf87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61536f203aa643608e7cca4cb14723d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 576.856973] env[62914]: DEBUG nova.network.neutron [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Successfully updated port: a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 577.040056] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 577.052417] env[62914]: DEBUG nova.network.neutron [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Updating instance_info_cache with network_info: [{"id": "90a32b17-6fbf-4efa-99d6-610bd414847b", "address": "fa:16:3e:38:14:59", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90a32b17-6f", "ovs_interfaceid": "90a32b17-6fbf-4efa-99d6-610bd414847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.293488] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbea906e-3dec-426b-be7b-442b41cd82af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.328551] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1305d94-2d24-49a5-9a9a-de8aef30e3f2 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.369462] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "refresh_cache-52097338-887e-4c79-8413-abfd7ea26c96" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.369738] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired lock "refresh_cache-52097338-887e-4c79-8413-abfd7ea26c96" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.369816] env[62914]: DEBUG nova.network.neutron [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 577.375027] env[62914]: DEBUG nova.network.neutron [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Updating instance_info_cache with network_info: [{"id": "59265cbb-d823-43dc-a07d-d850de95a7d8", "address": "fa:16:3e:32:51:d4", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59265cbb-d8", "ovs_interfaceid": "59265cbb-d823-43dc-a07d-d850de95a7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.375027] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbc0983-eeec-499d-ad91-f3de50a449ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.390904] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cee5cb-92f9-4ad8-a881-c2a136c13c68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.413904] env[62914]: DEBUG nova.compute.provider_tree [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Inventory has not changed in 
ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.558265] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Releasing lock "refresh_cache-43edad1f-cff0-4d3c-a721-98277d1cddc2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.558265] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Instance network_info: |[{"id": "90a32b17-6fbf-4efa-99d6-610bd414847b", "address": "fa:16:3e:38:14:59", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90a32b17-6f", "ovs_interfaceid": "90a32b17-6fbf-4efa-99d6-610bd414847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 577.560020] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:14:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90a32b17-6fbf-4efa-99d6-610bd414847b', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 577.576418] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.576719] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdf419a8-28fd-4e0d-a89d-000cc239ef26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.599298] env[62914]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 577.599756] env[62914]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62914) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 577.600914] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 577.600914] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Creating folder: Project (3e7977afd02c4940b9ffaefb78f5de34). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.601198] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3de6e2a4-4a13-494c-bb60-299836fab350 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.614310] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Created folder: Project (3e7977afd02c4940b9ffaefb78f5de34) in parent group-v941773. [ 577.614577] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Creating folder: Instances. Parent ref: group-v941777. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.614888] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d28e510-2cb6-4599-b7cd-00907aa35d2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.625473] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Created folder: Instances in parent group-v941777. [ 577.625884] env[62914]: DEBUG oslo.service.loopingcall [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.626164] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 577.626523] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef5cc9bd-1cd5-4850-bc05-216a151e40d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.648894] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 577.648894] env[62914]: value = "task-4831193" [ 577.648894] env[62914]: _type = "Task" [ 577.648894] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.658817] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831193, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.880863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Releasing lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.886264] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Instance network_info: |[{"id": "59265cbb-d823-43dc-a07d-d850de95a7d8", "address": "fa:16:3e:32:51:d4", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59265cbb-d8", "ovs_interfaceid": "59265cbb-d823-43dc-a07d-d850de95a7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 577.886398] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:51:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59265cbb-d823-43dc-a07d-d850de95a7d8', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 577.894981] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Creating folder: Project (2e460060822e4eda931ae402635e9eb6). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.896513] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6091655d-6585-4fbf-ab47-b4993b43a9b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.911609] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Created folder: Project (2e460060822e4eda931ae402635e9eb6) in parent group-v941773. [ 577.911609] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Creating folder: Instances. Parent ref: group-v941780. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.911609] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f11a234-2ed6-4af1-beaa-2672da561150 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.918085] env[62914]: DEBUG nova.scheduler.client.report [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 577.927162] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Created folder: Instances in parent group-v941780. [ 577.927563] env[62914]: DEBUG oslo.service.loopingcall [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.928658] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 577.928658] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ac8ee42-2879-4a46-a88d-2b6a26b062eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.952806] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 577.952806] env[62914]: value = "task-4831196" [ 577.952806] env[62914]: _type = "Task" [ 577.952806] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.966283] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831196, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.977509] env[62914]: DEBUG nova.network.neutron [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.060334] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 578.103709] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.106648] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.106648] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.106648] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.106648] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.106648] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 
tempest-InstanceActionsTestJSON-94509073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.107371] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.107371] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.107371] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.107371] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.107371] env[62914]: DEBUG nova.virt.hardware [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.107562] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3641f8-1f1f-4603-8985-fbf3e0b48946 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.120297] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b5f887-bb6e-4eaf-ab01-bffa320bb017 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.161292] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831193, 'name': CreateVM_Task, 'duration_secs': 0.399048} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.161904] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 578.393783] env[62914]: DEBUG oslo_vmware.service [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99c4513-c907-409f-a45f-6dcd14f0ab68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.404871] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.405114] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.405886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 578.407313] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3dfcea6-049f-49db-a883-dbd7ff159ec6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.416463] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 578.416463] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eef812-42f0-8ca6-4e5b-89fdca8a5778" [ 578.416463] env[62914]: _type = "Task" [ 578.416463] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.428137] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eef812-42f0-8ca6-4e5b-89fdca8a5778, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.429856] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.429856] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 578.432513] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.818s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.436048] env[62914]: INFO nova.compute.claims [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.471823] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831196, 'name': CreateVM_Task, 'duration_secs': 0.393755} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.472024] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 578.472711] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.723709] env[62914]: DEBUG nova.compute.manager [req-af423648-599a-4a6d-8191-702682d95626 req-91d3ef73-040f-4847-a52e-416117a3c8da service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Received event network-vif-plugged-a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 578.725018] env[62914]: DEBUG oslo_concurrency.lockutils [req-af423648-599a-4a6d-8191-702682d95626 req-91d3ef73-040f-4847-a52e-416117a3c8da service nova] Acquiring lock "52097338-887e-4c79-8413-abfd7ea26c96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.725018] env[62914]: DEBUG oslo_concurrency.lockutils [req-af423648-599a-4a6d-8191-702682d95626 req-91d3ef73-040f-4847-a52e-416117a3c8da service nova] Lock "52097338-887e-4c79-8413-abfd7ea26c96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.725018] env[62914]: DEBUG oslo_concurrency.lockutils [req-af423648-599a-4a6d-8191-702682d95626 req-91d3ef73-040f-4847-a52e-416117a3c8da service nova] Lock "52097338-887e-4c79-8413-abfd7ea26c96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.725018] env[62914]: DEBUG nova.compute.manager [req-af423648-599a-4a6d-8191-702682d95626 req-91d3ef73-040f-4847-a52e-416117a3c8da service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] No waiting events found dispatching network-vif-plugged-a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 578.725018] env[62914]: WARNING nova.compute.manager [req-af423648-599a-4a6d-8191-702682d95626 req-91d3ef73-040f-4847-a52e-416117a3c8da service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Received unexpected event network-vif-plugged-a977117b-c407-4071-a0e5-5a31734d1025 for instance with vm_state building and task_state spawning. 
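The network-vif-plugged messages above show the external-event handshake: pending events are keyed by (instance UUID, event name), popped when Neutron reports the event, and logged as "unexpected" when no waiter has registered yet (as for instance 52097338-887e-4c79-8413-abfd7ea26c96, still in vm_state building). Below is a minimal sketch of that wait/dispatch pattern, using a hypothetical InstanceEvents class built on threading.Event; it is an illustration, not Nova's actual nova.compute.manager code.

    import threading

    class InstanceEvents:
        """Toy model of the external-event handshake seen in the log above."""

        def __init__(self):
            self._events = {}            # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare_for_event(self, instance_uuid, event_name):
            # The spawning thread registers interest before the VIF is plugged.
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            # Called when Neutron reports e.g. network-vif-plugged-<port-id>.
            with self._lock:
                ev = self._events.pop((instance_uuid, event_name), None)
            if ev is None:
                # No waiter registered yet: the log above records this case as
                # an unexpected event while the instance is building/spawning.
                print(f"Received unexpected event {event_name} "
                      f"for instance {instance_uuid}")
                return None
            ev.set()
            return ev

In the run above the port-plug notification arrives before the spawn path waits on it, so the WARNING about an unexpected event appears to be the usual ordering race rather than a failure.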
[ 578.933526] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.933773] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.934758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.934923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.935443] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 578.935659] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.936591] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 578.936916] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65170f1c-706d-4e30-8130-7cef24944b10 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.939846] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d0942c9-b068-43d2-99ba-90201175d31a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.945285] env[62914]: DEBUG nova.compute.utils [None 
req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 578.953486] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 578.953677] env[62914]: DEBUG nova.network.neutron [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 578.963190] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 578.963190] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 578.965284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495ec00f-9bdc-4874-8868-e402b3f49720 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.972143] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 578.972143] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b2bc7d-63d2-3854-f029-4bcf35be5222" [ 578.972143] env[62914]: _type = "Task" [ 578.972143] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.990016] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-908d7511-9f83-4410-848e-a5df927cebbe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.996238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.996238] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.996353] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.999356] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 578.999356] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f21195-019f-d2ca-aadf-8105fe99413e" [ 578.999356] env[62914]: _type = "Task" [ 578.999356] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.013207] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f21195-019f-d2ca-aadf-8105fe99413e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.017077] env[62914]: DEBUG nova.network.neutron [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Updating instance_info_cache with network_info: [{"id": "a977117b-c407-4071-a0e5-5a31734d1025", "address": "fa:16:3e:34:92:c1", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa977117b-c4", "ovs_interfaceid": "a977117b-c407-4071-a0e5-5a31734d1025", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.119740] env[62914]: DEBUG nova.network.neutron [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Successfully created port: 7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.224954] env[62914]: DEBUG nova.policy [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30df25dc7985459e82f7995d32579174', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5d08d972640404c8f49b8fd932c5fae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 579.458453] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 579.479531] env[62914]: DEBUG nova.network.neutron [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Successfully updated port: 917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.517870] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 579.517870] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Creating directory with path [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.517870] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8912972a-2e5f-4e70-9363-bd64a819bf33 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.520574] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Releasing lock "refresh_cache-52097338-887e-4c79-8413-abfd7ea26c96" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.522809] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Instance network_info: |[{"id": "a977117b-c407-4071-a0e5-5a31734d1025", "address": "fa:16:3e:34:92:c1", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa977117b-c4", "ovs_interfaceid": "a977117b-c407-4071-a0e5-5a31734d1025", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 579.528372] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 
tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:92:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a977117b-c407-4071-a0e5-5a31734d1025', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.542563] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Creating folder: Project (15a48cfbcf1a492cbbca942d7ddb570d). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 579.547020] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a00729b-16b0-4264-8bb1-1e07fdf513b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.563153] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Created folder: Project (15a48cfbcf1a492cbbca942d7ddb570d) in parent group-v941773. [ 579.563153] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Creating folder: Instances. Parent ref: group-v941783. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 579.564370] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34f7442f-5d8f-4330-9a8b-0a5fa7b56a7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.566448] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Created directory with path [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.566448] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Fetch image to [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 579.566533] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Downloading image file data 75c43660-b52b-450e-ba36-0f721e14bc6c to [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk on the data store datastore2 {{(pid=62914) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 579.568819] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54f0b39-70b4-4ca4-8605-6c982c62e713 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.588487] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0377ceb1-879e-4f63-9853-3ff749bdadab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.591839] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Created folder: Instances in parent group-v941783. [ 579.591839] env[62914]: DEBUG oslo.service.loopingcall [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 579.592043] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 579.592827] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5187d1a-232c-4759-9395-f62eb49cba79 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.627209] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210fecd9-8c74-4912-8615-3e33b0458107 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.631746] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 579.631746] env[62914]: value = "task-4831200" [ 579.631746] env[62914]: _type = "Task" [ 579.631746] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.666868] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae2a53f-5ffc-42a5-b99d-61801695317a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.679852] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831200, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.679994] env[62914]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c00aac59-e5d1-46cf-844a-73fccf2185ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.751570] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4307e2-d809-424b-96c7-05570c167933 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.761312] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa12bc4a-1008-4fad-8f61-7d07d94bee42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.801357] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24ed67d-1384-4e86-b1a2-2f3dd7c9455b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.805109] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Downloading image file data 75c43660-b52b-450e-ba36-0f721e14bc6c to the data store datastore2 {{(pid=62914) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 579.812750] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40396c57-708b-419d-8ffe-d3ac96ba05cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.828025] env[62914]: DEBUG nova.compute.provider_tree [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.884728] env[62914]: DEBUG oslo_vmware.rw_handles [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 579.985662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "refresh_cache-a4fca617-da38-4913-b2c8-a2921da6db56" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.985662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquired lock "refresh_cache-a4fca617-da38-4913-b2c8-a2921da6db56" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.985662] env[62914]: DEBUG nova.network.neutron [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 580.145647] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831200, 'name': CreateVM_Task, 'duration_secs': 0.373578} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.146219] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 580.147222] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.147446] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.147817] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 580.148232] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52b5ba42-edae-4d8a-9b19-13132d7c9465 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.159414] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 580.159414] env[62914]: value = 
"session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e65aca-ca84-9d58-6b60-8a8821ebcb83" [ 580.159414] env[62914]: _type = "Task" [ 580.159414] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.171531] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e65aca-ca84-9d58-6b60-8a8821ebcb83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.333961] env[62914]: DEBUG nova.scheduler.client.report [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 580.476162] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 580.520429] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 580.520429] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 580.520429] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 580.521011] env[62914]: DEBUG 
nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 580.522163] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 580.522374] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 580.522619] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 580.522786] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 580.522964] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 580.523176] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 580.523390] env[62914]: DEBUG nova.virt.hardware [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 580.524483] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73478632-7e9c-4cb9-9fc2-f7bf4a8ea300 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.542718] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119918f9-230e-4c45-8211-26f8717d369e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.615839] env[62914]: DEBUG oslo_vmware.rw_handles [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Completed reading data from the image iterator. 
{{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 580.615839] env[62914]: DEBUG oslo_vmware.rw_handles [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 580.627252] env[62914]: DEBUG nova.network.neutron [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.671441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.671704] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 580.671915] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.762584] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Downloaded image file data 75c43660-b52b-450e-ba36-0f721e14bc6c to vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk on the data store datastore2 {{(pid=62914) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 580.765314] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 580.765616] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Copying Virtual Disk [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk to 
[datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 580.766039] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5262a38-ca2b-4cfc-85e6-14acefcbefcf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.777027] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 580.777027] env[62914]: value = "task-4831201" [ 580.777027] env[62914]: _type = "Task" [ 580.777027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.787847] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831201, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.822563] env[62914]: DEBUG nova.network.neutron [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Successfully created port: 3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.841693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.841755] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 580.849361] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.250s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.849361] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.849646] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 580.851223] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc5dc23-06aa-43f6-8086-a149b8869a7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.864378] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23a8396-b93f-4ea8-bc5e-0808407a74e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.895012] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c4dbfb-6a38-44ae-b7df-c1a2d61401b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.911223] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc9954c-2121-4a37-b154-755f31841617 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.943929] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180597MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 580.944152] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.944313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.295200] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831201, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.351969] env[62914]: DEBUG nova.compute.utils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 581.356075] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 581.356075] env[62914]: DEBUG nova.network.neutron [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 581.478934] env[62914]: DEBUG nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Received event network-vif-plugged-59265cbb-d823-43dc-a07d-d850de95a7d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 581.480131] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Acquiring lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.480407] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.480577] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.484020] env[62914]: DEBUG nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] No waiting events found dispatching network-vif-plugged-59265cbb-d823-43dc-a07d-d850de95a7d8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 581.484228] env[62914]: WARNING nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Received unexpected event network-vif-plugged-59265cbb-d823-43dc-a07d-d850de95a7d8 for instance with vm_state building and task_state spawning. 
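The entries above show Nova serializing work with named in-process locks from oslo.concurrency: the resource tracker's "compute_resources" lock and the per-instance "<uuid>-events" lock are acquired, the waited/held durations are logged at DEBUG, and the lock is released. A minimal sketch of that locking pattern follows; the function names, lock usage, and instance-UUID handling are hypothetical stand-ins for illustration, not code copied from Nova.

from oslo_concurrency import lockutils


# Decorator form: the named lock is held for the duration of the call.
# lockutils logs the acquire / "waited Ns" / "held Ns" / release timings at
# DEBUG, which is where the lock messages in the log above come from.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    pass  # hypothetical guarded section


# Context-manager form, mirroring the per-instance "<uuid>-events" lock taken
# while popping external instance events.
def pop_instance_event(instance_uuid):
    with lockutils.lock(f"{instance_uuid}-events"):
        pass  # hypothetical guarded section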
[ 581.487019] env[62914]: DEBUG nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Received event network-changed-90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 581.487019] env[62914]: DEBUG nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Refreshing instance network info cache due to event network-changed-90a32b17-6fbf-4efa-99d6-610bd414847b. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 581.487019] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Acquiring lock "refresh_cache-43edad1f-cff0-4d3c-a721-98277d1cddc2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.487019] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Acquired lock "refresh_cache-43edad1f-cff0-4d3c-a721-98277d1cddc2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.487019] env[62914]: DEBUG nova.network.neutron [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Refreshing network info cache for port 90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 581.510731] env[62914]: DEBUG nova.network.neutron [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Updating instance_info_cache with network_info: [{"id": "917f7d2d-3256-481e-9892-13779b20ab0f", "address": "fa:16:3e:f4:00:45", "network": {"id": "e50d3d81-9efd-40d8-b89f-3cec127b9720", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1685373553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0542b5c4f80141fbb4f129b3451edc4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap917f7d2d-32", "ovs_interfaceid": "917f7d2d-3256-481e-9892-13779b20ab0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.637508] env[62914]: DEBUG nova.policy [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Policy check for 
network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dda0f12511324c52b00236c75b33acc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e2b3db08ee34716be135d72b3ddda8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 581.791985] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689116} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.792280] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Copied Virtual Disk [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk to [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 581.792420] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleting the datastore file [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 581.793024] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7be6c26e-09d2-4992-86b3-891b984ea6a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.802210] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 581.802210] env[62914]: value = "task-4831203" [ 581.802210] env[62914]: _type = "Task" [ 581.802210] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.812900] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831203, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.859727] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 581.990875] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 43edad1f-cff0-4d3c-a721-98277d1cddc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 581.991521] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 3eff61b1-b09c-4a04-821c-cefdc7be3f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 581.991989] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 52097338-887e-4c79-8413-abfd7ea26c96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 581.992144] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance a4fca617-da38-4913-b2c8-a2921da6db56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 581.993390] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 61e36e7b-aaa1-420e-bd43-f0184b56581b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 581.993490] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance db31a794-3928-41bb-afd8-14fae9357654 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 581.993615] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance aede8da7-8bf2-4963-b08b-6e06007614a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 582.017904] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Releasing lock "refresh_cache-a4fca617-da38-4913-b2c8-a2921da6db56" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.017904] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Instance network_info: |[{"id": "917f7d2d-3256-481e-9892-13779b20ab0f", "address": "fa:16:3e:f4:00:45", "network": {"id": "e50d3d81-9efd-40d8-b89f-3cec127b9720", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1685373553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0542b5c4f80141fbb4f129b3451edc4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap917f7d2d-32", "ovs_interfaceid": "917f7d2d-3256-481e-9892-13779b20ab0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 582.018171] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:00:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '917f7d2d-3256-481e-9892-13779b20ab0f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 582.032434] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Creating folder: Project (0542b5c4f80141fbb4f129b3451edc4d). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 582.032785] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df682bd8-11e4-4720-a8b9-a3dd7d666779 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.050302] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Created folder: Project (0542b5c4f80141fbb4f129b3451edc4d) in parent group-v941773. [ 582.050302] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Creating folder: Instances. Parent ref: group-v941787. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 582.050302] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aaee8fc-57fb-414e-8f79-068e2610156b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.063511] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Created folder: Instances in parent group-v941787. [ 582.064319] env[62914]: DEBUG oslo.service.loopingcall [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.064319] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 582.064319] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0208978c-9277-47a9-b3bd-a163c3eb479b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.095235] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 582.095235] env[62914]: value = "task-4831206" [ 582.095235] env[62914]: _type = "Task" [ 582.095235] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.106116] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831206, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.156587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.156870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.235772] env[62914]: DEBUG nova.network.neutron [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Successfully updated port: 7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 582.320432] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026547} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.324173] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 582.324401] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Moving file from [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f/75c43660-b52b-450e-ba36-0f721e14bc6c to [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c. 
{{(pid=62914) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 582.325644] env[62914]: DEBUG nova.compute.manager [req-8b241f7d-6beb-4cd9-93dc-d0d8d9b89baa req-b183b132-366a-4afc-9ef4-f57609cbb9b4 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Received event network-vif-plugged-917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 582.325846] env[62914]: DEBUG oslo_concurrency.lockutils [req-8b241f7d-6beb-4cd9-93dc-d0d8d9b89baa req-b183b132-366a-4afc-9ef4-f57609cbb9b4 service nova] Acquiring lock "a4fca617-da38-4913-b2c8-a2921da6db56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.328446] env[62914]: DEBUG oslo_concurrency.lockutils [req-8b241f7d-6beb-4cd9-93dc-d0d8d9b89baa req-b183b132-366a-4afc-9ef4-f57609cbb9b4 service nova] Lock "a4fca617-da38-4913-b2c8-a2921da6db56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.328778] env[62914]: DEBUG oslo_concurrency.lockutils [req-8b241f7d-6beb-4cd9-93dc-d0d8d9b89baa req-b183b132-366a-4afc-9ef4-f57609cbb9b4 service nova] Lock "a4fca617-da38-4913-b2c8-a2921da6db56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.003s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.329198] env[62914]: DEBUG nova.compute.manager [req-8b241f7d-6beb-4cd9-93dc-d0d8d9b89baa req-b183b132-366a-4afc-9ef4-f57609cbb9b4 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] No waiting events found dispatching network-vif-plugged-917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 582.329351] env[62914]: WARNING nova.compute.manager [req-8b241f7d-6beb-4cd9-93dc-d0d8d9b89baa req-b183b132-366a-4afc-9ef4-f57609cbb9b4 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Received unexpected event network-vif-plugged-917f7d2d-3256-481e-9892-13779b20ab0f for instance with vm_state building and task_state spawning. [ 582.329702] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-6e3efa95-7673-44ce-b047-98fb28f1cbe4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.343894] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 582.343894] env[62914]: value = "task-4831207" [ 582.343894] env[62914]: _type = "Task" [ 582.343894] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.361533] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831207, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.498495] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 9e39cfb8-e277-4798-92b0-b54f310ef2f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 582.498495] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 582.498758] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '7', 'num_vm_building': '7', 'num_task_spawning': '6', 'num_os_type_None': '7', 'num_proj_3e7977afd02c4940b9ffaefb78f5de34': '1', 'io_workload': '7', 'num_proj_2e460060822e4eda931ae402635e9eb6': '1', 'num_proj_15a48cfbcf1a492cbbca942d7ddb570d': '1', 'num_proj_0542b5c4f80141fbb4f129b3451edc4d': '1', 'num_proj_61536f203aa643608e7cca4cb14723d7': '1', 'num_proj_a5d08d972640404c8f49b8fd932c5fae': '1', 'num_task_networking': '1', 'num_proj_8e2b3db08ee34716be135d72b3ddda8d': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 582.613267] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831206, 'name': CreateVM_Task, 'duration_secs': 0.381241} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.617289] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 582.618543] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.618751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.619121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 582.621403] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-581aaa21-a00a-42a0-9736-5235b912c34d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.626521] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 582.626521] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521434c7-8b94-7488-d47b-9726890ad401" [ 582.626521] env[62914]: _type = "Task" [ 582.626521] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.641769] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521434c7-8b94-7488-d47b-9726890ad401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.659585] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 582.698574] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be62c94-f5ff-42d2-bc65-56c1e738f909 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.706602] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20a4ec6-eed7-4cd3-ad51-ffeb6e36c42f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.741294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.741680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquired lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.741680] env[62914]: DEBUG nova.network.neutron [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 582.747177] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1471d75d-3336-4908-87d0-7359626a435a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.761568] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c22d5d6-5f3f-4a72-a045-9c259305fa49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.783615] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.853800] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831207, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.039561} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.854512] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] File moved {{(pid=62914) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 582.854512] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Cleaning up location [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 582.854512] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleting the datastore file [datastore2] vmware_temp/3b718f17-afda-46e1-98e5-c3b6c3c5133f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 582.855757] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fed07804-eb46-445f-9c56-4a33c682c5de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.862766] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 582.862766] env[62914]: value = "task-4831208" [ 582.862766] env[62914]: _type = "Task" [ 582.862766] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.874119] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831208, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.877606] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 582.908639] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.908897] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.909105] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.909317] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.909533] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.909591] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.910062] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.910255] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.910500] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 
tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.910682] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.910851] env[62914]: DEBUG nova.virt.hardware [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.911824] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29dd67e-c3b1-4027-8b4d-62565248c100 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.921847] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8e3d9c-ec2f-4e9d-905a-2fd8fba8e9b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.138349] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521434c7-8b94-7488-d47b-9726890ad401, 'name': SearchDatastore_Task, 'duration_secs': 0.009489} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.138659] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.138928] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 583.139120] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.194962] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
583.235556] env[62914]: DEBUG nova.network.neutron [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Updated VIF entry in instance network info cache for port 90a32b17-6fbf-4efa-99d6-610bd414847b. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 583.235556] env[62914]: DEBUG nova.network.neutron [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Updating instance_info_cache with network_info: [{"id": "90a32b17-6fbf-4efa-99d6-610bd414847b", "address": "fa:16:3e:38:14:59", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90a32b17-6f", "ovs_interfaceid": "90a32b17-6fbf-4efa-99d6-610bd414847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.273588] env[62914]: DEBUG nova.network.neutron [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Successfully created port: ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.287972] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.358060] env[62914]: DEBUG nova.network.neutron [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.375431] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831208, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034365} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.375686] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 583.376480] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54305c6f-c809-4ffa-8c69-9f25f25706d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.383964] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 583.383964] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff8d57-163c-3990-c071-2114c6aa5438" [ 583.383964] env[62914]: _type = "Task" [ 583.383964] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.394835] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff8d57-163c-3990-c071-2114c6aa5438, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.737661] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Releasing lock "refresh_cache-43edad1f-cff0-4d3c-a721-98277d1cddc2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.738017] env[62914]: DEBUG nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Received event network-changed-59265cbb-d823-43dc-a07d-d850de95a7d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 583.738300] env[62914]: DEBUG nova.compute.manager [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Refreshing instance network info cache due to event network-changed-59265cbb-d823-43dc-a07d-d850de95a7d8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 583.739497] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Acquiring lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.739659] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Acquired lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.739832] env[62914]: DEBUG nova.network.neutron [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Refreshing network info cache for port 59265cbb-d823-43dc-a07d-d850de95a7d8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 583.794820] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 583.795063] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.851s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.796668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.601s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.798660] env[62914]: INFO nova.compute.claims [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.910247] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff8d57-163c-3990-c071-2114c6aa5438, 'name': SearchDatastore_Task, 'duration_secs': 0.010901} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.912639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.912950] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 43edad1f-cff0-4d3c-a721-98277d1cddc2/43edad1f-cff0-4d3c-a721-98277d1cddc2.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 583.917924] env[62914]: DEBUG nova.compute.manager [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Received event network-changed-a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 583.919243] env[62914]: DEBUG nova.compute.manager [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Refreshing instance network info cache due to event network-changed-a977117b-c407-4071-a0e5-5a31734d1025. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 583.919504] env[62914]: DEBUG oslo_concurrency.lockutils [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] Acquiring lock "refresh_cache-52097338-887e-4c79-8413-abfd7ea26c96" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.919675] env[62914]: DEBUG oslo_concurrency.lockutils [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] Acquired lock "refresh_cache-52097338-887e-4c79-8413-abfd7ea26c96" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.919803] env[62914]: DEBUG nova.network.neutron [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Refreshing network info cache for port a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 583.924140] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.924287] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Creating directory with 
path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 583.924449] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d58b2a30-301c-4489-9699-1e8bd581e244 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.928766] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8906b4ca-01e6-4380-a294-500e3153a1b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.941505] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 583.941505] env[62914]: value = "task-4831210" [ 583.941505] env[62914]: _type = "Task" [ 583.941505] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.942742] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 583.942906] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 583.950712] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bd45939-ba97-43b3-aa65-0d1c368144cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.960304] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.961732] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 583.961732] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bd25d-7cce-6bc5-65c8-7efdf583f677" [ 583.961732] env[62914]: _type = "Task" [ 583.961732] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.973872] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bd25d-7cce-6bc5-65c8-7efdf583f677, 'name': SearchDatastore_Task, 'duration_secs': 0.009251} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.974947] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0ff75f0-a8d9-4e6a-8592-7fc1ef996c60 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.981512] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 583.981512] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527993b6-14fd-3073-a178-d52b6f112eeb" [ 583.981512] env[62914]: _type = "Task" [ 583.981512] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.993463] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527993b6-14fd-3073-a178-d52b6f112eeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.290538] env[62914]: DEBUG nova.network.neutron [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Updating instance_info_cache with network_info: [{"id": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "address": "fa:16:3e:c4:ab:b0", "network": {"id": "8a08ae7c-cab2-44bd-9284-52c0337d5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-638020922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61536f203aa643608e7cca4cb14723d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a9ccc3c-c2", "ovs_interfaceid": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.462754] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831210, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.495182] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527993b6-14fd-3073-a178-d52b6f112eeb, 'name': SearchDatastore_Task, 'duration_secs': 0.009135} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.495529] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.496469] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 3eff61b1-b09c-4a04-821c-cefdc7be3f64/3eff61b1-b09c-4a04-821c-cefdc7be3f64.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 584.496469] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.496469] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 584.497107] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04719ac1-fe04-481f-aa7d-7bbdc0c307ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.502023] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dc88248-3728-40da-9185-02bd62f133ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.507124] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 584.507124] env[62914]: value = "task-4831211" [ 584.507124] env[62914]: _type = "Task" [ 584.507124] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.515533] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 584.515533] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 584.516961] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ddfea95-c4e3-4e3b-aa45-1a881394d653 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.523600] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.528759] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 584.528759] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52339e2d-2ff1-c533-fc5d-2ad42725f788" [ 584.528759] env[62914]: _type = "Task" [ 584.528759] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.539008] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52339e2d-2ff1-c533-fc5d-2ad42725f788, 'name': SearchDatastore_Task, 'duration_secs': 0.008949} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.540042] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca147988-d5dc-4832-8c6e-e3105ceb8d2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.546600] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 584.546600] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b428ac-bf90-ba91-76cb-19ff866697c6" [ 584.546600] env[62914]: _type = "Task" [ 584.546600] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.557494] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b428ac-bf90-ba91-76cb-19ff866697c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.567990] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.568361] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.795766] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Releasing lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.796657] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Instance network_info: |[{"id": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "address": "fa:16:3e:c4:ab:b0", "network": {"id": "8a08ae7c-cab2-44bd-9284-52c0337d5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-638020922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61536f203aa643608e7cca4cb14723d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a9ccc3c-c2", "ovs_interfaceid": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 584.800494] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29faac43-3211-4bcb-822c-810599611adc 
tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:ab:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '471f65a5-21ea-45e3-a722-4e204ed65673', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 584.810762] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Creating folder: Project (61536f203aa643608e7cca4cb14723d7). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 584.810762] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb769ff9-47fc-4a59-8d5f-1c9134d60593 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.826212] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Created folder: Project (61536f203aa643608e7cca4cb14723d7) in parent group-v941773. [ 584.826421] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Creating folder: Instances. Parent ref: group-v941790. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 584.826898] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36f3ccb1-07cf-48e4-8aa0-7acf0a4c0569 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.842875] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Created folder: Instances in parent group-v941790. [ 584.843322] env[62914]: DEBUG oslo.service.loopingcall [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.843627] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 584.843964] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb692646-78e1-4f03-91f7-157e283ed746 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.874547] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 584.874547] env[62914]: value = "task-4831214" [ 584.874547] env[62914]: _type = "Task" [ 584.874547] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.883896] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831214, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.958679] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831210, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551823} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.962454] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 43edad1f-cff0-4d3c-a721-98277d1cddc2/43edad1f-cff0-4d3c-a721-98277d1cddc2.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 584.962744] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 584.963565] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a74172df-733c-4191-8d21-3b46f44009d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.973842] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 584.973842] env[62914]: value = "task-4831215" [ 584.973842] env[62914]: _type = "Task" [ 584.973842] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.988609] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831215, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.018623] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831211, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.036168] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb45d89-29da-436c-aed1-8fccafb224fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.048411] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f92ff47-4915-4b01-8602-e2d0c54b7d01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.062351] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b428ac-bf90-ba91-76cb-19ff866697c6, 'name': SearchDatastore_Task, 'duration_secs': 0.008901} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.093064] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.094022] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 52097338-887e-4c79-8413-abfd7ea26c96/52097338-887e-4c79-8413-abfd7ea26c96.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 585.094022] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 585.100333] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.101262] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 585.102400] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70425497-1dc3-4c6d-8919-10628677a72c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.108574] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27bbd40-81c3-4355-81a0-8d60994d67eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.116855] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ac558f9-0c3a-4bf3-b52d-df573845ebff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.130277] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49c4412-4863-4538-be75-126be0208642 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.136688] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 585.136688] env[62914]: value = "task-4831216" [ 585.136688] env[62914]: _type = "Task" [ 585.136688] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.148808] env[62914]: DEBUG nova.compute.provider_tree [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.151920] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 585.156113] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 585.162200] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d4ca24a-c228-4e37-97f7-9f535a2e475f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.166550] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831216, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.170061] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 585.170061] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ee81f3-4379-bf8d-382e-12b47e093e8f" [ 585.170061] env[62914]: _type = "Task" [ 585.170061] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.191858] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ee81f3-4379-bf8d-382e-12b47e093e8f, 'name': SearchDatastore_Task, 'duration_secs': 0.008911} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.192602] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c07ea56c-6b91-4897-89a9-1ca382fb62eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.201531] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 585.201531] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f017bf-108a-5098-3141-71404250bb4e" [ 585.201531] env[62914]: _type = "Task" [ 585.201531] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.218036] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f017bf-108a-5098-3141-71404250bb4e, 'name': SearchDatastore_Task, 'duration_secs': 0.008989} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.218036] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.218036] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] a4fca617-da38-4913-b2c8-a2921da6db56/a4fca617-da38-4913-b2c8-a2921da6db56.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 585.218036] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c439056-4b69-43f5-8158-e706676fa79e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.226049] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 585.226049] env[62914]: value = "task-4831217" [ 585.226049] env[62914]: _type = "Task" [ 585.226049] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.239880] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.280890] env[62914]: DEBUG nova.network.neutron [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Successfully updated port: 3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 585.388984] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831214, 'name': CreateVM_Task, 'duration_secs': 0.381244} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.389391] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 585.394827] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.395029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.395643] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 585.395751] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05548fe7-1630-4ef0-a07b-79b545b84f1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.404603] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 585.404603] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268596d-c17a-b6dc-f7ac-c8a82f438762" [ 585.404603] env[62914]: _type = "Task" [ 585.404603] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.415433] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268596d-c17a-b6dc-f7ac-c8a82f438762, 'name': SearchDatastore_Task, 'duration_secs': 0.009769} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.417280] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.418318] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 585.418318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.418318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.418318] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 585.418703] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7a5d219-3994-4da2-a76c-0ee7e314d005 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.428994] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 585.428994] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 585.433265] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b639410f-4da1-4add-a1de-1f94c7e6782d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.443258] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 585.443258] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5203ede8-7c21-8f29-2df3-03cf704bf104" [ 585.443258] env[62914]: _type = "Task" [ 585.443258] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.454321] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5203ede8-7c21-8f29-2df3-03cf704bf104, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.489045] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08487} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.489339] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 585.490299] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e56534-7ca2-471d-9709-609985c21a92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.516967] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 43edad1f-cff0-4d3c-a721-98277d1cddc2/43edad1f-cff0-4d3c-a721-98277d1cddc2.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 585.517364] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ce8fca4-c53b-4706-93fe-2450ab9887a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.542824] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831211, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.545200] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 585.545200] env[62914]: value = "task-4831218" [ 585.545200] env[62914]: _type = "Task" [ 585.545200] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.555460] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.635247] env[62914]: DEBUG nova.network.neutron [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Updated VIF entry in instance network info cache for port a977117b-c407-4071-a0e5-5a31734d1025. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 585.635395] env[62914]: DEBUG nova.network.neutron [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Updating instance_info_cache with network_info: [{"id": "a977117b-c407-4071-a0e5-5a31734d1025", "address": "fa:16:3e:34:92:c1", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa977117b-c4", "ovs_interfaceid": "a977117b-c407-4071-a0e5-5a31734d1025", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.650986] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831216, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.652637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.662540] env[62914]: DEBUG nova.scheduler.client.report [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 585.698016] env[62914]: DEBUG nova.network.neutron [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Updated VIF entry in instance network info cache for port 59265cbb-d823-43dc-a07d-d850de95a7d8. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 585.698575] env[62914]: DEBUG nova.network.neutron [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Updating instance_info_cache with network_info: [{"id": "59265cbb-d823-43dc-a07d-d850de95a7d8", "address": "fa:16:3e:32:51:d4", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59265cbb-d8", "ovs_interfaceid": "59265cbb-d823-43dc-a07d-d850de95a7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.737783] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831217, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.787239] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "refresh_cache-db31a794-3928-41bb-afd8-14fae9357654" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.787493] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquired lock "refresh_cache-db31a794-3928-41bb-afd8-14fae9357654" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.787709] env[62914]: DEBUG nova.network.neutron [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.959125] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5203ede8-7c21-8f29-2df3-03cf704bf104, 'name': SearchDatastore_Task, 'duration_secs': 0.011745} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.960537] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a79a51b9-78a5-4f9e-9f05-149bde7c766d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.968557] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 585.968557] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52176c6d-0f32-4826-c6ac-39e713127093" [ 585.968557] env[62914]: _type = "Task" [ 585.968557] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.985024] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52176c6d-0f32-4826-c6ac-39e713127093, 'name': SearchDatastore_Task, 'duration_secs': 0.010435} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.985024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.985024] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 61e36e7b-aaa1-420e-bd43-f0184b56581b/61e36e7b-aaa1-420e-bd43-f0184b56581b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 585.985665] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4696799c-362b-4422-ae8c-deb5bcf29dad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.995409] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 585.995409] env[62914]: value = "task-4831220" [ 585.995409] env[62914]: _type = "Task" [ 585.995409] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.006442] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.034649] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831211, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.455563} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.034649] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 3eff61b1-b09c-4a04-821c-cefdc7be3f64/3eff61b1-b09c-4a04-821c-cefdc7be3f64.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 586.034649] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 586.034649] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b57ce89a-beb0-4bbe-8d4a-8d188a4cfd2d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.046315] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 586.046315] env[62914]: value = "task-4831221" [ 586.046315] env[62914]: _type = "Task" [ 586.046315] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.079404] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.079404] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.151023] env[62914]: DEBUG oslo_concurrency.lockutils [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] Releasing lock "refresh_cache-52097338-887e-4c79-8413-abfd7ea26c96" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.151023] env[62914]: DEBUG nova.compute.manager [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Received event network-changed-917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 586.151023] env[62914]: DEBUG nova.compute.manager [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Refreshing instance network info cache due to event network-changed-917f7d2d-3256-481e-9892-13779b20ab0f. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 586.151023] env[62914]: DEBUG oslo_concurrency.lockutils [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] Acquiring lock "refresh_cache-a4fca617-da38-4913-b2c8-a2921da6db56" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.151023] env[62914]: DEBUG oslo_concurrency.lockutils [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] Acquired lock "refresh_cache-a4fca617-da38-4913-b2c8-a2921da6db56" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.151299] env[62914]: DEBUG nova.network.neutron [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Refreshing network info cache for port 917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 586.161488] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831216, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.173235] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.173445] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Start building networks asynchronously for instance. 
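
The Acquiring/Acquired/Releasing lines above come from oslo.concurrency's lockutils, used both as a context manager (the per-instance "refresh_cache-<uuid>" locks) and as a decorator-style guard (the resource tracker's "compute_resources" lock, held 2.377s here). A small illustrative sketch; the lock names are taken from the log, the function bodies are placeholders.

    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid):
        # placeholder for the Neutron-backed cache refresh done in the log
        print('refreshing network info cache for %s' % instance_uuid)

    uuid = 'a4fca617-da38-4913-b2c8-a2921da6db56'
    with lockutils.lock('refresh_cache-' + uuid):
        refresh_network_info_cache(uuid)

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # placeholder for claiming CPU/RAM/disk on the compute node

    instance_claim()

These are in-process locks by default (external=False); the "waited"/"held" timings in the log are emitted by the same lockutils wrapper.
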
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 586.177348] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.523s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.179679] env[62914]: INFO nova.compute.claims [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.202315] env[62914]: DEBUG oslo_concurrency.lockutils [req-0452faf2-edd9-43c7-b39d-fdf9aded12aa req-262dda16-a03a-4f25-b488-f8fee106bfa2 service nova] Releasing lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.246031] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831217, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.496627] env[62914]: DEBUG nova.network.neutron [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.514741] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.567484] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.570291] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082414} completed successfully. 
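
The "Extending root virtual disk to 1048576" entries appear to express the requested size in KiB, which is consistent with the m1.nano flavor recorded later in this log (root_gb=1). A one-line check of the arithmetic:

    # root_gb=1 from the m1.nano flavor shown further down in this log
    root_gb = 1
    requested_size_kib = root_gb * 1024 * 1024
    assert requested_size_kib == 1048576  # matches the value in the log entries
    print(requested_size_kib)
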
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.570639] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 586.571527] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865323c5-32b9-4c96-9bde-849fa2b8fc2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.597462] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 3eff61b1-b09c-4a04-821c-cefdc7be3f64/3eff61b1-b09c-4a04-821c-cefdc7be3f64.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 586.600786] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40f0e0c4-f62a-4b01-a97b-7b2a3d86b3bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.625764] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 586.625764] env[62914]: value = "task-4831222" [ 586.625764] env[62914]: _type = "Task" [ 586.625764] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.639208] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831222, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.652838] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831216, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.687852] env[62914]: DEBUG nova.compute.utils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 586.692628] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 586.692838] env[62914]: DEBUG nova.network.neutron [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 586.740526] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831217, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.456212} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.741292] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] a4fca617-da38-4913-b2c8-a2921da6db56/a4fca617-da38-4913-b2c8-a2921da6db56.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 586.741625] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 586.741756] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f8f0fd6-69fe-4e32-874e-6ba12b926625 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.753867] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 586.753867] env[62914]: value = "task-4831223" [ 586.753867] env[62914]: _type = "Task" [ 586.753867] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.767880] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831223, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.855886] env[62914]: DEBUG nova.compute.manager [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Received event network-changed-7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 586.855886] env[62914]: DEBUG nova.compute.manager [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Refreshing instance network info cache due to event network-changed-7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 586.855886] env[62914]: DEBUG oslo_concurrency.lockutils [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] Acquiring lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.855886] env[62914]: DEBUG oslo_concurrency.lockutils [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] Acquired lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.855886] env[62914]: DEBUG nova.network.neutron [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Refreshing network info cache for port 7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 586.922343] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "e1018767-71e4-49c9-bd4d-02eae39dc26b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.922802] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.013627] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831220, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.064513] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831218, 'name': ReconfigVM_Task, 'duration_secs': 1.497414} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.064790] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 43edad1f-cff0-4d3c-a721-98277d1cddc2/43edad1f-cff0-4d3c-a721-98277d1cddc2.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 587.065622] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8c29f32-f63f-490c-b73b-627174c7bfbc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.073561] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 587.073561] env[62914]: value = "task-4831224" [ 587.073561] env[62914]: _type = "Task" [ 587.073561] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.082975] env[62914]: DEBUG nova.policy [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9368f83d5fb242a0919f3df24770d367', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82c3284839f54f9bbaab4591a75b5f05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 587.090911] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831224, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.137051] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831222, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.150105] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831216, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.568348} completed successfully. 
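
The "Policy check for network:attach_external_network failed" entry above is an oslo.policy decision made against the request's credential dict (a member/reader token, so an admin-only rule fails without aborting the build). A rough sketch of that kind of check; the rule default here is illustrative, not nova's actual policy file.

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '82c3284839f54f9bbaab4591a75b5f05'}
    target = {'project_id': creds['project_id']}

    # do_raise=False turns a denial into a False return value, which is how a
    # "failed" check can simply be logged while the build continues.
    allowed = enforcer.authorize('network:attach_external_network',
                                 target, creds, do_raise=False)
    print(allowed)  # False for this member/reader token under the example rule
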
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.150385] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 52097338-887e-4c79-8413-abfd7ea26c96/52097338-887e-4c79-8413-abfd7ea26c96.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 587.150596] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 587.150860] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f48b2ded-e0f5-4978-af25-eb05afc32911 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.160586] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 587.160586] env[62914]: value = "task-4831225" [ 587.160586] env[62914]: _type = "Task" [ 587.160586] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.169601] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831225, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.196022] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 587.266731] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831223, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096102} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.273037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 587.276821] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79be62a-eeb6-4cab-92b3-5f42b30666cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.315949] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] a4fca617-da38-4913-b2c8-a2921da6db56/a4fca617-da38-4913-b2c8-a2921da6db56.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 587.317904] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0e363ea-d6f2-4e5b-96a2-c27325c99e04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.338531] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 587.338531] env[62914]: value = "task-4831226" [ 587.338531] env[62914]: _type = "Task" [ 587.338531] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.349024] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.428227] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Starting instance... 
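
The "Reconfiguring VM instance … to attach disk" steps above are ReconfigVM_Task calls whose spec adds a VirtualDisk device backed by the VMDK copied earlier. A rough sketch of building such a spec with the suds factory that oslo.vmware exposes; the session, morefs, controller key and zero capacity below are placeholders/assumptions, not values read from this log.

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)
    vm_ref = None  # VirtualMachine moref (placeholder)
    vmdk_path = '[datastore2] <instance-uuid>/<instance-uuid>.vmdk'

    cf = session.vim.client.factory
    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path
    backing.diskMode = 'persistent'

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.capacityInKB = 0      # size comes from the existing VMDK (assumption)
    disk.key = -100            # negative key marks a device being added
    disk.controllerKey = 1000  # assumes an existing SCSI controller with key 1000
    disk.unitNumber = 0

    device_change = cf.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'add'
    device_change.device = disk

    config_spec = cf.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)
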
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 587.448134] env[62914]: DEBUG nova.network.neutron [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Updating instance_info_cache with network_info: [{"id": "3b909a06-3f47-4b08-8330-c3ac1c957a35", "address": "fa:16:3e:eb:b4:87", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b909a06-3f", "ovs_interfaceid": "3b909a06-3f47-4b08-8330-c3ac1c957a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.456105] env[62914]: DEBUG nova.compute.manager [req-01c901df-f956-4eda-a87a-7728064e6148 req-92959474-139f-490c-a461-877f1a893fec service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Received event network-vif-plugged-7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 587.456105] env[62914]: DEBUG oslo_concurrency.lockutils [req-01c901df-f956-4eda-a87a-7728064e6148 req-92959474-139f-490c-a461-877f1a893fec service nova] Acquiring lock "61e36e7b-aaa1-420e-bd43-f0184b56581b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.456105] env[62914]: DEBUG oslo_concurrency.lockutils [req-01c901df-f956-4eda-a87a-7728064e6148 req-92959474-139f-490c-a461-877f1a893fec service nova] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.456105] env[62914]: DEBUG oslo_concurrency.lockutils [req-01c901df-f956-4eda-a87a-7728064e6148 req-92959474-139f-490c-a461-877f1a893fec service nova] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.456105] env[62914]: DEBUG nova.compute.manager [req-01c901df-f956-4eda-a87a-7728064e6148 req-92959474-139f-490c-a461-877f1a893fec service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] No waiting events found dispatching network-vif-plugged-7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 587.456301] env[62914]: WARNING nova.compute.manager [req-01c901df-f956-4eda-a87a-7728064e6148 req-92959474-139f-490c-a461-877f1a893fec service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Received unexpected event network-vif-plugged-7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 for instance with vm_state building and task_state spawning. [ 587.495279] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac953ed1-35c2-4f74-8894-9d2cc8cf6e97 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.509107] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7cc101-84b7-4086-966d-7bbdf3f6353e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.519635] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831220, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.063715} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.547786] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 61e36e7b-aaa1-420e-bd43-f0184b56581b/61e36e7b-aaa1-420e-bd43-f0184b56581b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 587.547786] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 587.547786] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-445e3b37-e170-4d34-93c0-1d9c02c720f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.549575] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651227da-95fd-4a12-b8fc-c8328236c686 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.560637] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f9eadb-c1b1-4f7d-ab90-a6b6f332c7d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.567049] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 587.567049] env[62914]: value = "task-4831227" [ 587.567049] env[62914]: _type = "Task" [ 587.567049] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.583924] env[62914]: DEBUG nova.compute.provider_tree [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.591162] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831227, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.598538] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831224, 'name': Rename_Task, 'duration_secs': 0.177817} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.598538] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 587.598934] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6b3c420-b205-4694-86f7-aada13a1d3ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.606785] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 587.606785] env[62914]: value = "task-4831228" [ 587.606785] env[62914]: _type = "Task" [ 587.606785] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.622107] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.638326] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831222, 'name': ReconfigVM_Task, 'duration_secs': 0.662485} completed successfully. 
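
Once the disk is attached, the log shows a Rename_Task followed by PowerOnVM_Task for the same VM. Both follow the same invoke-then-wait shape; a compact sketch with placeholder session, moref and name:

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)
    vm_ref = None  # VirtualMachine moref (placeholder)

    rename_task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                     newName='<new-vm-name>')
    session.wait_for_task(rename_task)

    power_on_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_on_task)
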
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.638647] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 3eff61b1-b09c-4a04-821c-cefdc7be3f64/3eff61b1-b09c-4a04-821c-cefdc7be3f64.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 587.639383] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0a7ce2e-5918-4eb9-98ee-13ef4c14ec85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.645941] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 587.645941] env[62914]: value = "task-4831229" [ 587.645941] env[62914]: _type = "Task" [ 587.645941] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.657036] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831229, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.672198] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071171} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.672755] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 587.673835] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a4229b-cd29-49ab-a995-522bcb1fac29 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.703014] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 52097338-887e-4c79-8413-abfd7ea26c96/52097338-887e-4c79-8413-abfd7ea26c96.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 587.709909] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf42a625-97d9-4709-8ed9-9db7a6d1072c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.732377] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 587.732377] env[62914]: value = "task-4831230" [ 587.732377] env[62914]: _type = "Task" [ 587.732377] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.749471] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831230, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.758285] env[62914]: DEBUG nova.network.neutron [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Successfully updated port: ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.851054] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831226, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.953666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Releasing lock "refresh_cache-db31a794-3928-41bb-afd8-14fae9357654" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.954098] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Instance network_info: |[{"id": "3b909a06-3f47-4b08-8330-c3ac1c957a35", "address": "fa:16:3e:eb:b4:87", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b909a06-3f", "ovs_interfaceid": "3b909a06-3f47-4b08-8330-c3ac1c957a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 587.954497] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:b4:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b909a06-3f47-4b08-8330-c3ac1c957a35', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.965364] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Creating folder: Project (a5d08d972640404c8f49b8fd932c5fae). Parent ref: group-v941773. 
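
The "Creating folder: Project (…)" and "Creating folder: Instances" entries are CreateFolder calls on a parent Folder moref; if the folder already exists vCenter raises DuplicateName, which callers usually treat as success. A hedged sketch of that pattern; the parent moref and project name are placeholders.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)
    parent_ref = None  # Folder moref, e.g. the group-v941773 parent in the log

    try:
        project_folder = session.invoke_api(
            session.vim, 'CreateFolder', parent_ref,
            name='Project (<project-id>)')
    except vexc.DuplicateName:
        project_folder = None  # already present; look it up instead (placeholder)
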
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.966570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.966830] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74d2acff-2036-44c1-81e5-28d754d829aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.979192] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Created folder: Project (a5d08d972640404c8f49b8fd932c5fae) in parent group-v941773. [ 587.979479] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Creating folder: Instances. Parent ref: group-v941793. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.982990] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d624997-3d3a-4d9d-96fd-858bff53f787 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.988056] env[62914]: DEBUG nova.network.neutron [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Updated VIF entry in instance network info cache for port 917f7d2d-3256-481e-9892-13779b20ab0f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 587.988408] env[62914]: DEBUG nova.network.neutron [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Updating instance_info_cache with network_info: [{"id": "917f7d2d-3256-481e-9892-13779b20ab0f", "address": "fa:16:3e:f4:00:45", "network": {"id": "e50d3d81-9efd-40d8-b89f-3cec127b9720", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1685373553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0542b5c4f80141fbb4f129b3451edc4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap917f7d2d-32", "ovs_interfaceid": "917f7d2d-3256-481e-9892-13779b20ab0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.000557] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Created folder: Instances in parent group-v941793. [ 588.000557] env[62914]: DEBUG oslo.service.loopingcall [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.000557] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db31a794-3928-41bb-afd8-14fae9357654] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 588.000557] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cad103bb-50cf-4b10-9297-fb22cbdf7bf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.025856] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 588.025856] env[62914]: value = "task-4831233" [ 588.025856] env[62914]: _type = "Task" [ 588.025856] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.039683] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831233, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.084050] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068274} completed successfully. 
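
The instance_info_cache payloads above are plain lists of VIF dicts. A tiny helper like the one below (illustrative only, with values copied from the entry above) pulls out the fields most useful when reading these logs:

    def summarize_vif(vif):
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        return {'port_id': vif['id'],
                'mac': vif['address'],
                'bridge': vif['network']['bridge'],
                'fixed_ips': ips}

    vif = {
        'id': '917f7d2d-3256-481e-9892-13779b20ab0f',
        'address': 'fa:16:3e:f4:00:45',
        'network': {'bridge': 'br-int',
                    'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.12'}]}]},
    }
    print(summarize_vif(vif))
    # {'port_id': '917f7d2d-...', 'mac': 'fa:16:3e:f4:00:45',
    #  'bridge': 'br-int', 'fixed_ips': ['192.168.128.12']}
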
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.090162] env[62914]: DEBUG nova.scheduler.client.report [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.096199] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 588.098062] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3681ce4-03c9-4794-991e-4b37c38ea1f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.131695] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 61e36e7b-aaa1-420e-bd43-f0184b56581b/61e36e7b-aaa1-420e-bd43-f0184b56581b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 588.138410] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0922422e-273b-4e5e-bde5-f1d0d4cdd5b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.174854] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831228, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.175589] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 588.175589] env[62914]: value = "task-4831234" [ 588.175589] env[62914]: _type = "Task" [ 588.175589] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.184037] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831229, 'name': Rename_Task} progress is 14%. 
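
The inventory report above is what the scheduler sizes this node with: usable capacity per resource class is roughly (total - reserved) * allocation_ratio. Checking with the values from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
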
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.194149] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831234, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.230844] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 588.251744] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.266406] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.266406] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.266406] env[62914]: DEBUG nova.network.neutron [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.282442] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.282750] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 
tempest-VolumesAdminNegativeTest-328242297-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 588.282926] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.283231] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.283421] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.283615] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.284319] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.284319] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.284319] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.284527] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.284759] env[62914]: DEBUG nova.virt.hardware [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.285927] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f112dbdf-8426-4917-9fea-9cd9367f714d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.301710] env[62914]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9839ab00-95cd-4a8e-8cf7-abab5260f68d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.353860] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831226, 'name': ReconfigVM_Task, 'duration_secs': 0.694222} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.354219] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Reconfigured VM instance instance-00000004 to attach disk [datastore2] a4fca617-da38-4913-b2c8-a2921da6db56/a4fca617-da38-4913-b2c8-a2921da6db56.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.354959] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f819f0ba-4600-4fa3-9062-2f3ff2f9292a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.365024] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 588.365024] env[62914]: value = "task-4831235" [ 588.365024] env[62914]: _type = "Task" [ 588.365024] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.380457] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831235, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.395833] env[62914]: DEBUG nova.network.neutron [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Successfully created port: 16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.491542] env[62914]: DEBUG oslo_concurrency.lockutils [req-5f754827-1208-43d1-9a9f-cfe5a9e71bc4 req-5964bd2c-a509-4e4d-b0fb-793f168b8e11 service nova] Releasing lock "refresh_cache-a4fca617-da38-4913-b2c8-a2921da6db56" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.538831] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831233, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.598626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.600824] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 588.602310] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.636s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.603937] env[62914]: INFO nova.compute.claims [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.634900] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831228, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.680056] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831229, 'name': Rename_Task, 'duration_secs': 0.816649} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.686307] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 588.687429] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d786574-7ed2-43bc-9b28-9e53163ed76f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.699216] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831234, 'name': ReconfigVM_Task, 'duration_secs': 0.387346} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.699949] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 61e36e7b-aaa1-420e-bd43-f0184b56581b/61e36e7b-aaa1-420e-bd43-f0184b56581b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.700766] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 588.700766] env[62914]: value = "task-4831236" [ 588.700766] env[62914]: _type = "Task" [ 588.700766] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.701029] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aaf7d51b-540a-45bc-aaba-c0be14a590da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.715839] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831236, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.717692] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 588.717692] env[62914]: value = "task-4831237" [ 588.717692] env[62914]: _type = "Task" [ 588.717692] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.731317] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831237, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.745392] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831230, 'name': ReconfigVM_Task, 'duration_secs': 0.904673} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.745697] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 52097338-887e-4c79-8413-abfd7ea26c96/52097338-887e-4c79-8413-abfd7ea26c96.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.746539] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-840a4395-030b-415b-b3c8-1fbf98387abc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.755671] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 588.755671] env[62914]: value = "task-4831238" [ 588.755671] env[62914]: _type = "Task" [ 588.755671] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.775130] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831238, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.880909] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831235, 'name': Rename_Task, 'duration_secs': 0.197453} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.881493] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 588.881691] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db4c3877-8d48-44d6-94c1-0657c70b1458 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.895080] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 588.895080] env[62914]: value = "task-4831239" [ 588.895080] env[62914]: _type = "Task" [ 588.895080] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.905124] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831239, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.039934] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831233, 'name': CreateVM_Task, 'duration_secs': 0.581497} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.040302] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db31a794-3928-41bb-afd8-14fae9357654] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 589.040867] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.041605] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.041605] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 589.041751] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46cba728-0493-4962-a5fa-e13cc857fead {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.048768] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 589.048768] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529c1eb3-5d6e-008b-18b1-935d87480737" [ 589.048768] env[62914]: _type = "Task" [ 589.048768] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.058551] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529c1eb3-5d6e-008b-18b1-935d87480737, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.059481] env[62914]: DEBUG nova.network.neutron [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.111857] env[62914]: DEBUG nova.compute.utils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.112912] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 589.113101] env[62914]: DEBUG nova.network.neutron [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.138983] env[62914]: DEBUG oslo_vmware.api [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831228, 'name': PowerOnVM_Task, 'duration_secs': 1.18712} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.139125] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 589.139577] env[62914]: INFO nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Took 20.43 seconds to spawn the instance on the hypervisor. [ 589.139826] env[62914]: DEBUG nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 589.141039] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd21a81a-acb5-48f3-ad82-39bbf3a21d7f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.233833] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831236, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.246840] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831237, 'name': Rename_Task, 'duration_secs': 0.159685} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.248190] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 589.248600] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-888a6f45-495a-497d-bfca-9582fbcef531 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.265705] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 589.265705] env[62914]: value = "task-4831240" [ 589.265705] env[62914]: _type = "Task" [ 589.265705] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.279057] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831238, 'name': Rename_Task, 'duration_secs': 0.218949} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.279057] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 589.279057] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35a6cd1b-b779-43da-bb60-274ec317ca55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.287059] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831240, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.290575] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 589.290575] env[62914]: value = "task-4831241" [ 589.290575] env[62914]: _type = "Task" [ 589.290575] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.304909] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831241, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.414009] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831239, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.415569] env[62914]: DEBUG nova.policy [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01f77b5cfa144f77a8a1d58d775fa174', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfac7a5b4e7349688942cac59bd2adfc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 589.568035] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529c1eb3-5d6e-008b-18b1-935d87480737, 'name': SearchDatastore_Task, 'duration_secs': 0.016366} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.568035] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.568035] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.568035] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.568266] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.568319] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 
tempest-ServerDiagnosticsTest-352645101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.568767] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a809158a-257a-4ed5-9ccb-5a0dbc30066b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.584036] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.584036] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 589.586956] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a6588a2-5e3a-400a-bb77-0f230eb4e489 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.595157] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 589.595157] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cf0543-d302-db20-3aee-a61cc4221e74" [ 589.595157] env[62914]: _type = "Task" [ 589.595157] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.614767] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cf0543-d302-db20-3aee-a61cc4221e74, 'name': SearchDatastore_Task, 'duration_secs': 0.013479} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.616357] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 589.626123] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-babe6dfe-0959-4ed4-8b02-7ea5a1bb6e75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.631678] env[62914]: DEBUG nova.network.neutron [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Updated VIF entry in instance network info cache for port 7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 589.631678] env[62914]: DEBUG nova.network.neutron [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Updating instance_info_cache with network_info: [{"id": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "address": "fa:16:3e:c4:ab:b0", "network": {"id": "8a08ae7c-cab2-44bd-9284-52c0337d5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-638020922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61536f203aa643608e7cca4cb14723d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a9ccc3c-c2", "ovs_interfaceid": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.641543] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 589.641543] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52da9f38-af4f-37d7-7e0e-5cfa343467d2" [ 589.641543] env[62914]: _type = "Task" [ 589.641543] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.665536] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52da9f38-af4f-37d7-7e0e-5cfa343467d2, 'name': SearchDatastore_Task, 'duration_secs': 0.01312} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.669664] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.669969] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] db31a794-3928-41bb-afd8-14fae9357654/db31a794-3928-41bb-afd8-14fae9357654.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 589.675465] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94973999-32b2-41f5-9533-7337d42413b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.680783] env[62914]: INFO nova.compute.manager [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Took 25.33 seconds to build instance. [ 589.693508] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 589.693508] env[62914]: value = "task-4831242" [ 589.693508] env[62914]: _type = "Task" [ 589.693508] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.704598] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.726177] env[62914]: DEBUG oslo_vmware.api [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831236, 'name': PowerOnVM_Task, 'duration_secs': 0.996844} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.727197] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 589.727197] env[62914]: INFO nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Took 18.70 seconds to spawn the instance on the hypervisor. [ 589.727197] env[62914]: DEBUG nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 589.727775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3544ccd1-c773-4c11-8759-d3d246822e01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.786379] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831240, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.803638] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831241, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.912740] env[62914]: DEBUG oslo_vmware.api [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831239, 'name': PowerOnVM_Task, 'duration_secs': 0.647619} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.915020] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 589.915020] env[62914]: INFO nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Took 14.16 seconds to spawn the instance on the hypervisor. 
[ 589.915020] env[62914]: DEBUG nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 589.915020] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780f5424-0451-4dc3-8fec-138d5f1404f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.931419] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ad683c-0169-4f7c-aa2c-95c0cd76e27e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.941293] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ea61f6-7d9a-4e3f-96ee-7ed323841913 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.978045] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b70707a-54a5-45bb-bb50-fcf9508a21fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.991548] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a62b70-72a4-4f66-a395-4cf73c0bc6b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.014124] env[62914]: DEBUG nova.compute.provider_tree [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.138149] env[62914]: DEBUG oslo_concurrency.lockutils [req-c710ec7f-c133-4641-8854-9aa2d530f92c req-ea79210b-0466-46b2-9d85-7a661560ba2d service nova] Releasing lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.189358] env[62914]: DEBUG oslo_concurrency.lockutils [None req-31b2cf93-d688-45d9-84dd-17ec85efa0c8 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.848s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.208181] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831242, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.257094] env[62914]: INFO nova.compute.manager [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Took 24.77 seconds to build instance. [ 590.281400] env[62914]: DEBUG oslo_vmware.api [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831240, 'name': PowerOnVM_Task, 'duration_secs': 0.647794} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.281707] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 590.282558] env[62914]: INFO nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Took 12.22 seconds to spawn the instance on the hypervisor. [ 590.282969] env[62914]: DEBUG nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 590.283964] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5837c20e-1f5f-467c-929e-2165fbaea656 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.310382] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831241, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.413585] env[62914]: DEBUG nova.network.neutron [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.446277] env[62914]: INFO nova.compute.manager [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Took 24.03 seconds to build instance. [ 590.518409] env[62914]: DEBUG nova.scheduler.client.report [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 590.557321] env[62914]: DEBUG nova.network.neutron [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Successfully created port: a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.638347] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 590.682022] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.684549] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.684710] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.685258] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.685258] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.685258] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.685574] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.685635] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 590.685796] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.686103] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.687135] env[62914]: DEBUG nova.virt.hardware [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.687215] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a12b27-356e-40f8-8bc7-616444dcf844 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.703446] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aba317-e82c-4091-986d-16521d6b53f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.727510] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831242, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.01601} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.727832] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] db31a794-3928-41bb-afd8-14fae9357654/db31a794-3928-41bb-afd8-14fae9357654.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 590.728053] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 590.728341] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1082b541-bf5e-483f-a60f-2d3d687d1050 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.737172] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 590.737172] env[62914]: value = "task-4831243" [ 590.737172] env[62914]: _type = "Task" [ 590.737172] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.753118] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831243, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.760458] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0ac08062-b789-40f1-b0a7-f1aadccfc6c5 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.287s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.812414] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831241, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.818024] env[62914]: INFO nova.compute.manager [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Took 23.47 seconds to build instance. 
[ 590.916669] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.917061] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Instance network_info: |[{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 590.917460] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:16:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae6db457-8035-4a28-bf52-7113144cfe11', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.926086] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Creating folder: Project (8e2b3db08ee34716be135d72b3ddda8d). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.926439] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a8af8d3-659d-4a41-af17-ad85d163295e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.947189] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Created folder: Project (8e2b3db08ee34716be135d72b3ddda8d) in parent group-v941773. 
[ 590.947189] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Creating folder: Instances. Parent ref: group-v941796. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.947189] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3dcca5e-1a92-410b-8c3d-bc375bd2546c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.950213] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c8bd7c42-f595-410c-bbae-acddceb44c6f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "a4fca617-da38-4913-b2c8-a2921da6db56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.541s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.960038] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Created folder: Instances in parent group-v941796. [ 590.960350] env[62914]: DEBUG oslo.service.loopingcall [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.960550] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 590.960758] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd71f749-55ed-4596-86f4-605b87a90ec7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.997661] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.997661] env[62914]: value = "task-4831246" [ 590.997661] env[62914]: _type = "Task" [ 590.997661] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.011359] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831246, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.029449] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.031247] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 591.255047] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831243, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139562} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.255047] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 591.258761] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbdf7c7-d70a-46a0-964b-e1fb3d973dbb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.293990] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] db31a794-3928-41bb-afd8-14fae9357654/db31a794-3928-41bb-afd8-14fae9357654.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 591.294713] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2328cf3b-ff54-4fee-b3d2-6cab73afe0df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.325373] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29faac43-3211-4bcb-822c-810599611adc tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.988s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.325879] env[62914]: DEBUG oslo_vmware.api [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831241, 'name': PowerOnVM_Task, 'duration_secs': 1.767309} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.328340] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 591.332198] env[62914]: INFO nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Took 17.97 seconds to spawn the instance on the hypervisor. 
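The 'Lock "..." acquired / "released" ... held N.NNNs' entries above and below are produced by oslo.concurrency's lockutils wrappers: a per-instance in-process lock serializes each _locked_do_build_and_run_instance call, and a "compute_resources" lock guards resource-tracker claims. A hedged sketch of the two usage patterns (decorator and context manager), with the lock names taken from the log; the function body is a stand-in:

    from oslo_concurrency import lockutils

    # Per-instance lock, as in the "_locked_do_build_and_run_instance" entries.
    @lockutils.synchronized('db31a794-3928-41bb-afd8-14fae9357654')
    def _locked_do_build_and_run_instance():
        ...  # build and spawn work happens while the lock is held

    # Resource-tracker style usage as a context manager.
    with lockutils.lock('compute_resources'):
        ...  # instance_claim work, as in the entries above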
[ 591.332198] env[62914]: DEBUG nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 591.332198] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 591.332198] env[62914]: value = "task-4831247" [ 591.332198] env[62914]: _type = "Task" [ 591.332198] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.332198] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a324c3-6750-46f1-b2ce-ff8ff1a690dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.508752] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831246, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.536628] env[62914]: DEBUG nova.compute.utils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.538173] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 591.538358] env[62914]: DEBUG nova.network.neutron [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 591.852233] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831247, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.854344] env[62914]: DEBUG nova.policy [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '717bc653b83e47568ac0ee983b656c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4026bc0aca7941a79d5e71bb1a7df1f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 591.867898] env[62914]: INFO nova.compute.manager [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Took 25.86 seconds to build instance. [ 592.014354] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831246, 'name': CreateVM_Task, 'duration_secs': 0.582883} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.014537] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 592.015243] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.015438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.016026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 592.016337] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ded8d7f-e0a1-4e7c-a80d-c3a827c623f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.027440] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 592.027440] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258af74-276f-f9cc-43e1-c4d3f8e87668" [ 592.027440] env[62914]: _type = "Task" [ 592.027440] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.036406] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258af74-276f-f9cc-43e1-c4d3f8e87668, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.042806] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 592.277709] env[62914]: DEBUG nova.network.neutron [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Successfully updated port: 16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.349044] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831247, 'name': ReconfigVM_Task, 'duration_secs': 0.602618} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.349044] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Reconfigured VM instance instance-00000006 to attach disk [datastore2] db31a794-3928-41bb-afd8-14fae9357654/db31a794-3928-41bb-afd8-14fae9357654.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.349044] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f07b9f15-4d58-44e1-b0db-5521fb3bcff6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.361240] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 592.361240] env[62914]: value = "task-4831248" [ 592.361240] env[62914]: _type = "Task" [ 592.361240] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.369328] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0aca7b03-b3d9-4bdf-bc41-f6dae0316e75 tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "52097338-887e-4c79-8413-abfd7ea26c96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.376s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.374356] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831248, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.541384] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258af74-276f-f9cc-43e1-c4d3f8e87668, 'name': SearchDatastore_Task, 'duration_secs': 0.013269} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.541799] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.542029] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.542264] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.542418] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.542605] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.543079] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c9a4bba-3f39-4f29-b115-fcb20badd1af 
{{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.556910] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.556910] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 592.556910] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d59ebdb-5639-4f0a-8ee1-f41bd1a03dc6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.566136] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 592.566136] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521dd4dc-f842-de47-0bee-d2e0e3f614df" [ 592.566136] env[62914]: _type = "Task" [ 592.566136] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.577257] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521dd4dc-f842-de47-0bee-d2e0e3f614df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.781033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "refresh_cache-9e39cfb8-e277-4798-92b0-b54f310ef2f4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.781195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquired lock "refresh_cache-9e39cfb8-e277-4798-92b0-b54f310ef2f4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.781352] env[62914]: DEBUG nova.network.neutron [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.874703] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831248, 'name': Rename_Task, 'duration_secs': 0.183745} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.874703] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 592.874703] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb1f12c3-eab6-4a4d-a9aa-2948d7a6a797 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.883438] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 592.883438] env[62914]: value = "task-4831249" [ 592.883438] env[62914]: _type = "Task" [ 592.883438] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.895233] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.053895] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 593.080644] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521dd4dc-f842-de47-0bee-d2e0e3f614df, 'name': SearchDatastore_Task, 'duration_secs': 0.012499} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.084503] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56f0dd7a-3794-422f-8765-fca0e4e764b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.096808] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 593.096808] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52224673-feec-31fc-1f42-43e3b83ad6b0" [ 593.096808] env[62914]: _type = "Task" [ 593.096808] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.103550] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:23:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='655599099',id=32,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1359831923',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 593.104018] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 593.104018] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.104207] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 593.104347] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.104668] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 593.104711] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 593.104831] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 
tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 593.105613] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 593.105613] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 593.105613] env[62914]: DEBUG nova.virt.hardware [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.107493] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd504c53-62ee-4817-ad88-35154da37b6d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.121240] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52224673-feec-31fc-1f42-43e3b83ad6b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.121240] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3109429b-d5de-46dc-b395-d9fac3db3114 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.400321] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831249, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.534090] env[62914]: DEBUG nova.network.neutron [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.608342] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52224673-feec-31fc-1f42-43e3b83ad6b0, 'name': SearchDatastore_Task, 'duration_secs': 0.026965} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.608342] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.608591] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5/aede8da7-8bf2-4963-b08b-6e06007614a5.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 593.609400] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e137f2c7-7def-4f39-b78c-3a15096d6656 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.620103] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 593.620103] env[62914]: value = "task-4831250" [ 593.620103] env[62914]: _type = "Task" [ 593.620103] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.631605] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.873971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.873971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.901585] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831249, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.134449] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831250, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.197188] env[62914]: DEBUG nova.network.neutron [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Updating instance_info_cache with network_info: [{"id": "16cf2714-36d7-443b-a820-1fe738d54164", "address": "fa:16:3e:89:33:54", "network": {"id": "19be32a9-21c0-4be1-9bc1-1faf46fd1cf6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1668507659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82c3284839f54f9bbaab4591a75b5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "674802e7-b847-4bef-a7a8-f90ac7a3a0a7", "external-id": "nsx-vlan-transportzone-953", "segmentation_id": 953, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16cf2714-36", "ovs_interfaceid": "16cf2714-36d7-443b-a820-1fe738d54164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.344446] env[62914]: DEBUG nova.network.neutron [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Successfully created port: cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.378167] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 594.398218] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831249, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.410822] env[62914]: DEBUG nova.network.neutron [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Successfully updated port: a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 594.558569] env[62914]: DEBUG nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Received event network-vif-plugged-3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 594.558808] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Acquiring lock "db31a794-3928-41bb-afd8-14fae9357654-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.559034] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Lock "db31a794-3928-41bb-afd8-14fae9357654-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.559241] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Lock "db31a794-3928-41bb-afd8-14fae9357654-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.559438] env[62914]: DEBUG nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] No waiting events found dispatching network-vif-plugged-3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 594.559568] env[62914]: WARNING nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Received unexpected event network-vif-plugged-3b909a06-3f47-4b08-8330-c3ac1c957a35 for instance with vm_state building and task_state spawning. [ 594.559721] env[62914]: DEBUG nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Received event network-changed-3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 594.559869] env[62914]: DEBUG nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Refreshing instance network info cache due to event network-changed-3b909a06-3f47-4b08-8330-c3ac1c957a35. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 594.560058] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Acquiring lock "refresh_cache-db31a794-3928-41bb-afd8-14fae9357654" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.560189] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Acquired lock "refresh_cache-db31a794-3928-41bb-afd8-14fae9357654" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.560343] env[62914]: DEBUG nova.network.neutron [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Refreshing network info cache for port 3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.635706] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653002} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.635706] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5/aede8da7-8bf2-4963-b08b-6e06007614a5.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 594.636947] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.636947] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60ff228d-ae5b-4861-9468-9494e2f22bd0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.646468] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 594.646468] env[62914]: value = "task-4831251" [ 594.646468] env[62914]: _type = "Task" [ 594.646468] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.662449] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831251, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.705975] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Releasing lock "refresh_cache-9e39cfb8-e277-4798-92b0-b54f310ef2f4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.706364] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Instance network_info: |[{"id": "16cf2714-36d7-443b-a820-1fe738d54164", "address": "fa:16:3e:89:33:54", "network": {"id": "19be32a9-21c0-4be1-9bc1-1faf46fd1cf6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1668507659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82c3284839f54f9bbaab4591a75b5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "674802e7-b847-4bef-a7a8-f90ac7a3a0a7", "external-id": "nsx-vlan-transportzone-953", "segmentation_id": 953, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16cf2714-36", "ovs_interfaceid": "16cf2714-36d7-443b-a820-1fe738d54164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 594.707079] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:33:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '674802e7-b847-4bef-a7a8-f90ac7a3a0a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16cf2714-36d7-443b-a820-1fe738d54164', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.715921] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Creating folder: Project (82c3284839f54f9bbaab4591a75b5f05). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.716309] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-873da234-ab3e-4493-ae00-a17204a3dd81 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.731189] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Created folder: Project (82c3284839f54f9bbaab4591a75b5f05) in parent group-v941773. [ 594.733212] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Creating folder: Instances. Parent ref: group-v941799. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.733212] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-442691c3-fa1c-43ea-b964-96e4e3637cc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.748161] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Created folder: Instances in parent group-v941799. [ 594.748486] env[62914]: DEBUG oslo.service.loopingcall [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.748668] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 594.749079] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-379f9ac2-a242-49a1-ba6d-8a4990d68822 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.781645] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.781645] env[62914]: value = "task-4831254" [ 594.781645] env[62914]: _type = "Task" [ 594.781645] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.799422] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831254, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.872809] env[62914]: DEBUG nova.compute.manager [req-910042d7-bb59-420d-98b6-85c62ca02e17 req-c27a4104-548d-4036-b953-2568975a9c38 service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Received event network-vif-plugged-ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 594.873578] env[62914]: DEBUG oslo_concurrency.lockutils [req-910042d7-bb59-420d-98b6-85c62ca02e17 req-c27a4104-548d-4036-b953-2568975a9c38 service nova] Acquiring lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.873578] env[62914]: DEBUG oslo_concurrency.lockutils [req-910042d7-bb59-420d-98b6-85c62ca02e17 req-c27a4104-548d-4036-b953-2568975a9c38 service nova] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.873578] env[62914]: DEBUG oslo_concurrency.lockutils [req-910042d7-bb59-420d-98b6-85c62ca02e17 req-c27a4104-548d-4036-b953-2568975a9c38 service nova] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.874756] env[62914]: DEBUG nova.compute.manager [req-910042d7-bb59-420d-98b6-85c62ca02e17 req-c27a4104-548d-4036-b953-2568975a9c38 service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] No waiting events found dispatching network-vif-plugged-ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 594.875134] env[62914]: WARNING nova.compute.manager [req-910042d7-bb59-420d-98b6-85c62ca02e17 req-c27a4104-548d-4036-b953-2568975a9c38 service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Received unexpected event network-vif-plugged-ae6db457-8035-4a28-bf52-7113144cfe11 for instance with vm_state building and task_state spawning. [ 594.904746] env[62914]: DEBUG oslo_vmware.api [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831249, 'name': PowerOnVM_Task, 'duration_secs': 1.85417} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.904746] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 594.904746] env[62914]: INFO nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Took 14.43 seconds to spawn the instance on the hypervisor. 
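The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries come from oslo.service's looping-call machinery, which re-runs a callable at a fixed interval until it signals completion. A minimal, generic sketch of that pattern (placeholder readiness check; not the exact class or call site Nova uses here):

    import random

    from oslo_service import loopingcall

    def work_is_done():
        # Stand-in for a real readiness check (e.g. polling a task or VM state).
        return random.random() > 0.7

    def _poll():
        if work_is_done():
            # Raising LoopingCallDone stops the loop and returns retvalue.
            raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone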
[ 594.904746] env[62914]: DEBUG nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 594.906287] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04b07e2-6516-4645-933f-99c7f419396e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.909999] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.910331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.912340] env[62914]: INFO nova.compute.claims [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.919192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.919192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquired lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.919192] env[62914]: DEBUG nova.network.neutron [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 595.162083] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154882} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.162494] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.163497] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a360c7-45fe-4ba2-8f84-dba0e859eaf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.207054] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5/aede8da7-8bf2-4963-b08b-6e06007614a5.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.207664] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3492ef6-2f48-4db9-ac29-5c79f4789ca3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.236314] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 595.236314] env[62914]: value = "task-4831255" [ 595.236314] env[62914]: _type = "Task" [ 595.236314] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.251164] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquiring lock "e69c36e9-3c59-48e3-9962-ffe8de10a789" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.251164] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.261573] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.296626] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831254, 'name': CreateVM_Task} progress is 25%. 
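Note: the repeated "Waiting for the task: (returnval){ value = task-4831255 ... }" and "progress is N%" entries come from polling a vCenter task object until it reaches a terminal state. A hedged sketch of that loop, assuming a hypothetical get_task_info() lookup (the real driver goes through oslo.vmware's wait_for_task, not this code):

import time

def get_task_info(task_id):
    """Hypothetical stand-in for a vSphere TaskInfo lookup."""
    raise NotImplementedError

def wait_for_task(task_id, interval=0.5, timeout=300):
    # Poll until success/error, logging progress like
    # "Task: {'id': task-4831255, 'name': ReconfigVM_Task} progress is 14%."
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)        # assumed dict with 'state', 'progress', 'error'
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")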
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.446713] env[62914]: INFO nova.compute.manager [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Took 26.83 seconds to build instance. [ 595.561090] env[62914]: DEBUG nova.network.neutron [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.751921] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.763604] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 595.797805] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831254, 'name': CreateVM_Task, 'duration_secs': 0.970915} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.797805] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 595.797805] env[62914]: DEBUG oslo_vmware.service [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbcdd1e-688c-4bc9-b34f-e6fa4803a5d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.809026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.809026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.809296] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 595.809509] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e734be1f-e95c-4322-9d33-9a13bdadbc55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.817724] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 595.817724] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5213bee1-9993-74f5-0190-ccf179e6f6af" [ 595.817724] env[62914]: _type = "Task" [ 595.817724] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.829743] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5213bee1-9993-74f5-0190-ccf179e6f6af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.950905] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2943893-8d8d-460c-b5e7-1c2185af2992 tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "db31a794-3928-41bb-afd8-14fae9357654" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.347s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.990046] env[62914]: DEBUG nova.network.neutron [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Updated VIF entry in instance network info cache for port 3b909a06-3f47-4b08-8330-c3ac1c957a35. 
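Note: the "Updated VIF entry in instance network info cache" message refers to the network_info structure dumped in the entry that follows. Extracting the useful fields (port id, MAC, device name, fixed IPs) is plain dictionary walking; a short sketch using a trimmed copy of the cached entry shown below:

network_info = [{
    "id": "3b909a06-3f47-4b08-8330-c3ac1c957a35",
    "address": "fa:16:3e:eb:b4:87",
    "network": {
        "label": "shared",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.138", "type": "fixed"}],
        }],
    },
    "devname": "tap3b909a06-3f",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)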
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 595.990046] env[62914]: DEBUG nova.network.neutron [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Updating instance_info_cache with network_info: [{"id": "3b909a06-3f47-4b08-8330-c3ac1c957a35", "address": "fa:16:3e:eb:b4:87", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b909a06-3f", "ovs_interfaceid": "3b909a06-3f47-4b08-8330-c3ac1c957a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.080165] env[62914]: DEBUG nova.network.neutron [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updating instance_info_cache with network_info: [{"id": "a37b3d57-45a7-4167-970b-4734a54661f8", "address": "fa:16:3e:85:44:f6", "network": {"id": "3cc69eb5-cd59-4351-8a69-68f647db0af1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-590356650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfac7a5b4e7349688942cac59bd2adfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa37b3d57-45", "ovs_interfaceid": "a37b3d57-45a7-4167-970b-4734a54661f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.172027] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bf4675-30e8-4723-a260-12e608e51331 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.181945] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e9d970a7-424e-4c3b-825b-d9422519c482 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.217232] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e3edbf-f63b-4cfd-804a-3e53cb73b4bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.225777] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4fa179-2a9c-4b85-af5c-fc58931f69aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.243556] env[62914]: DEBUG nova.compute.provider_tree [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.255572] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831255, 'name': ReconfigVM_Task, 'duration_secs': 0.640091} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.256806] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Reconfigured VM instance instance-00000007 to attach disk [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5/aede8da7-8bf2-4963-b08b-6e06007614a5.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.257661] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66023ab3-c403-4a91-abdd-c91475c35736 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.266913] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 596.266913] env[62914]: value = "task-4831256" [ 596.266913] env[62914]: _type = "Task" [ 596.266913] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.285257] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831256, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.301468] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.331171] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.331171] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.331171] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.331171] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.331654] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.331820] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14172032-b9ea-47f7-aaeb-67786abb00a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.356535] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.356809] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Folder [datastore1] devstack-image-cache_base created. 
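Note: the MakeDirectory call followed by "Folder [datastore1] devstack-image-cache_base created" is a create-if-missing step: the driver asks for the cache directory every time and treats "already exists" as success so concurrent spawns do not race. A local-filesystem analogy of that idempotent shape (not the datastore API itself):

import errno, os

def create_folder_if_missing(path):
    # Same intent as the FileManager.MakeDirectory call logged above:
    # creating an already-existing directory is not an error.
    try:
        os.makedirs(path)
        print(f"Created directory with path {path}")
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
        print(f"Folder {path} already exists")

create_folder_if_missing("/tmp/devstack-image-cache_base")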
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 596.358468] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b2860d-a7bf-4bf6-ab6a-200574dc11ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.368994] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2e660bf-4c99-4980-add9-3fbc5102d000 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.376048] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 596.376048] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526bd413-b913-0dab-44dd-c81b91c73cf5" [ 596.376048] env[62914]: _type = "Task" [ 596.376048] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.390047] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526bd413-b913-0dab-44dd-c81b91c73cf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.492800] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Releasing lock "refresh_cache-db31a794-3928-41bb-afd8-14fae9357654" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.493504] env[62914]: DEBUG nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Received event network-changed-ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 596.493940] env[62914]: DEBUG nova.compute.manager [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Refreshing instance network info cache due to event network-changed-ae6db457-8035-4a28-bf52-7113144cfe11. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 596.494570] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Acquiring lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.494570] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Acquired lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.494570] env[62914]: DEBUG nova.network.neutron [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Refreshing network info cache for port ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 596.585825] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Releasing lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.586788] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Instance network_info: |[{"id": "a37b3d57-45a7-4167-970b-4734a54661f8", "address": "fa:16:3e:85:44:f6", "network": {"id": "3cc69eb5-cd59-4351-8a69-68f647db0af1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-590356650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfac7a5b4e7349688942cac59bd2adfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa37b3d57-45", "ovs_interfaceid": "a37b3d57-45a7-4167-970b-4734a54661f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 596.587802] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:44:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'a37b3d57-45a7-4167-970b-4734a54661f8', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.596704] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Creating folder: Project (bfac7a5b4e7349688942cac59bd2adfc). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 596.599101] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bddd3a47-183c-4028-a9b0-fc5c87ecf68d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.604616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.604725] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.617456] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Created folder: Project (bfac7a5b4e7349688942cac59bd2adfc) in parent group-v941773. [ 596.619460] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Creating folder: Instances. Parent ref: group-v941802. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 596.619460] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f2acba0-daf8-4e8e-97c0-95154d5f87b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.634310] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Created folder: Instances in parent group-v941802. [ 596.634651] env[62914]: DEBUG oslo.service.loopingcall [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 596.634924] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 596.635296] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89172f8b-cf2c-412b-a461-eebde198c4f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.658869] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.658869] env[62914]: value = "task-4831259" [ 596.658869] env[62914]: _type = "Task" [ 596.658869] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.668276] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831259, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.747812] env[62914]: DEBUG nova.scheduler.client.report [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 596.782947] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831256, 'name': Rename_Task, 'duration_secs': 0.220218} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.783299] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 596.784381] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d655c97-1f28-42a4-9812-f2d542e170cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.792486] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 596.792486] env[62914]: value = "task-4831260" [ 596.792486] env[62914]: _type = "Task" [ 596.792486] env[62914]: } to complete. 
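Note: the inventory dict reported to placement above encodes how much capacity the scheduler may hand out per resource class: roughly (total - reserved) * allocation_ratio, stepped by step_size and capped per allocation by max_unit. A quick check against the numbers in the log (VCPU 48 * 4.0, MEMORY_MB 196590 - 512, DISK_GB 200):

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 96},
}

for rc, inv in inventory.items():
    schedulable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {schedulable:.0f} schedulable, at most {inv['max_unit']} per allocation")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200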
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.811175] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831260, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.887939] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 596.888250] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Creating directory with path [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.888546] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ba20cae-83ba-4714-b7d9-c680405d732a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.924799] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Created directory with path [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.925152] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Fetch image to [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 596.925294] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Downloading image file data 75c43660-b52b-450e-ba36-0f721e14bc6c to [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk on the data store datastore1 {{(pid=62914) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 596.926154] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499a1f75-1b2e-4405-a7f3-8195c88b971f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.937334] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7daddf5-1171-41fb-a5ee-4f1675c8cc20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.951735] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ef3edd-45ef-4205-b505-a0bb8c581a3b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.992563] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394b7197-7cd4-42ea-ac22-2cba94e545cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.002941] env[62914]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a061f480-f134-479a-a275-f2bcd7922608 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.107708] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 597.117932] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Downloading image file data 75c43660-b52b-450e-ba36-0f721e14bc6c to the data store datastore1 {{(pid=62914) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 597.176601] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831259, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.210341] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 597.282043] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.282754] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Start building networks asynchronously for instance. 
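Note: the rw_handles entry above streams the 21318656-byte image straight into the datastore over HTTPS, using the host's /folder endpoint with dcPath and dsName query parameters. A rough illustration of that kind of upload with requests; this is not oslo.vmware's rw_handles, and the cookie name and TLS handling here are assumptions for the sketch:

import requests

def upload_to_datastore(session_cookie, local_vmdk, host, ds_path,
                        datacenter="ha-datacenter", datastore="datastore1"):
    url = f"https://{host}:443/folder/{ds_path}"
    with open(local_vmdk, "rb") as f:
        resp = requests.put(
            url,
            params={"dcPath": datacenter, "dsName": datastore},
            data=f,                                   # streamed; the image is not read into memory
            cookies={"vmware_soap_session": session_cookie},  # assumption: reuse the SOAP session cookie
            verify=False,                             # lab setup; verify the CA in a real deployment
            timeout=300,
        )
    resp.raise_for_status()
    return resp.status_code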
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 597.297026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.994s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.300715] env[62914]: INFO nova.compute.claims [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.335204] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831260, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.559297] env[62914]: DEBUG nova.network.neutron [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Successfully updated port: cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 597.645857] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.678847] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831259, 'name': CreateVM_Task, 'duration_secs': 0.687262} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.679032] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 597.680472] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.682262] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.682262] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 597.682262] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05a8eee1-ff86-4a1f-af11-811f9d5998b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.690066] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 597.690066] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52afb707-388e-35de-562f-3fe5ad937366" [ 597.690066] env[62914]: _type = "Task" [ 597.690066] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.701442] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52afb707-388e-35de-562f-3fe5ad937366, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.817023] env[62914]: DEBUG nova.compute.utils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 597.819896] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 597.820262] env[62914]: DEBUG nova.network.neutron [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 597.832627] env[62914]: DEBUG oslo_vmware.api [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831260, 'name': PowerOnVM_Task, 'duration_secs': 0.71838} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.834132] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 597.834505] env[62914]: INFO nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Took 14.96 seconds to spawn the instance on the hypervisor. [ 597.834807] env[62914]: DEBUG nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 597.837400] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91e0ce1-6c42-47b4-b71c-c733c91ad582 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.885578] env[62914]: DEBUG nova.network.neutron [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updated VIF entry in instance network info cache for port ae6db457-8035-4a28-bf52-7113144cfe11. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 597.885578] env[62914]: DEBUG nova.network.neutron [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.949060] env[62914]: DEBUG nova.policy [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afa1ce49a8bd4d1a8f3f5ef460a13de5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33e55050ffe94a588a5db112563b5555', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 598.058592] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Completed reading data from the image iterator. {{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 598.058848] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
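Note: the "Policy check for network:attach_external_network failed" entry is an authorization decision evaluated against the request credentials shown there (admin flag, roles, project). Stripped of oslo.policy, an admin-only rule reduces to a lookup over that dict; a toy evaluation, assuming an admin-only rule, which matches the failure logged for this member/reader token:

creds = {
    "is_admin": False,
    "user_id": "afa1ce49a8bd4d1a8f3f5ef460a13de5",
    "project_id": "33e55050ffe94a588a5db112563b5555",
    "roles": ["member", "reader"],
}

def check_attach_external_network(creds):
    # Illustrative only: an "is_admin or role:admin" style rule.
    return creds.get("is_admin", False) or "admin" in creds.get("roles", [])

print(check_attach_external_network(creds))   # False -> "Policy check ... failed"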
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 598.065109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.065342] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.065643] env[62914]: DEBUG nova.network.neutron [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.205735] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.206120] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.206451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.247868] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Downloaded image file data 75c43660-b52b-450e-ba36-0f721e14bc6c to vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk on the data store datastore1 {{(pid=62914) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 598.251425] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 598.254537] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Copying Virtual Disk [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk to [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 598.254537] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f4d07bb-7da0-4556-ba49-0c9425a74798 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.264416] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 598.264416] env[62914]: value = "task-4831261" [ 598.264416] env[62914]: _type = "Task" [ 598.264416] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.278889] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.325065] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 598.372393] env[62914]: INFO nova.compute.manager [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Took 28.78 seconds to build instance. [ 598.387069] env[62914]: DEBUG oslo_concurrency.lockutils [req-df800127-2802-4489-abda-5706f456a6cd req-8a162e96-eb36-4969-af07-66cec735c78e service nova] Releasing lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.648411] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea2a182-5b8a-495b-b6dc-002934d59b04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.665601] env[62914]: DEBUG nova.network.neutron [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Instance cache missing network info. 
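Note: the image-cache flow visible above downloads the Glance image to a throw-away vmware_temp area as tmp-sparse.vmdk and then runs CopyVirtualDisk to produce a usable <image_id>.vmdk, while the earlier lock messages name the shared devstack-image-cache_base location the cached disk belongs to. A small sketch of how those datastore paths fit together (path construction only, no API calls):

import uuid

def fetch_paths(datastore, image_id):
    # Paths as they appear in the log: a temporary working area for the download and
    # the sparse->usable copy, plus the shared cache location referenced by the
    # "[datastore1] devstack-image-cache_base/<image>..." lock messages.
    tmp = f"vmware_temp/{uuid.uuid4()}/{image_id}"
    return {
        "download_target": f"[{datastore}] {tmp}/tmp-sparse.vmdk",
        "copy_target":     f"[{datastore}] {tmp}/{image_id}.vmdk",
        "cache_vmdk":      f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk",
    }

for name, path in fetch_paths("datastore1", "75c43660-b52b-450e-ba36-0f721e14bc6c").items():
    print(f"{name}: {path}")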
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.670502] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e1a7da-a728-4eee-b187-29a7b25de240 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.678053] env[62914]: DEBUG nova.network.neutron [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Successfully created port: 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.708997] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c96213-cfa5-4dfc-bb63-67c5a8d191f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.719375] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d27e7a-7604-4e99-8727-57eada777618 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.738357] env[62914]: DEBUG nova.compute.provider_tree [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.782027] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831261, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.874693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-36650cb1-bde3-488e-a1fc-08cdddf6ea0b tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.295s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.088851] env[62914]: DEBUG nova.network.neutron [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Updating instance_info_cache with network_info: [{"id": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "address": "fa:16:3e:6b:7c:d2", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf87f855-3a", "ovs_interfaceid": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.225933] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.226385] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.243036] env[62914]: DEBUG nova.scheduler.client.report [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 599.259156] env[62914]: DEBUG nova.compute.manager [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 599.263411] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3708b34-298d-419d-aedd-3108562be571 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.292905] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831261, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.344293] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 599.396569] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 599.397459] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 599.397736] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.397800] env[62914]: DEBUG 
nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 599.398489] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.398639] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 599.398960] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 599.399113] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 599.399521] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 599.399764] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 599.400824] env[62914]: DEBUG nova.virt.hardware [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 599.402605] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff02e8a7-a547-46a5-9e52-20f251a983d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.415819] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402c35c2-9062-43cb-bd9c-1542a60b740e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.599405] env[62914]: DEBUG oslo_concurrency.lockutils 
[None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Releasing lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.599405] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Instance network_info: |[{"id": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "address": "fa:16:3e:6b:7c:d2", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf87f855-3a", "ovs_interfaceid": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 599.600181] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:7c:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2321dbbe-f64a-4253-a462-21676f8a278e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf87f855-3a4c-43d5-a06f-db1eb5eec958', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.610157] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Creating folder: Project (4026bc0aca7941a79d5e71bb1a7df1f8). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.610157] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bebffdf7-ddf5-4521-93fe-f4692de5c933 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.629499] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Created folder: Project (4026bc0aca7941a79d5e71bb1a7df1f8) in parent group-v941773. 
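
The CopyVirtualDisk_Task, CreateFolder and CreateVM_Task entries above all follow the same shape: a vSphere task is started, then oslo_vmware.api polls it (the wait_for_task / _poll_task messages) until it reports success or failure, logging a progress percentage on each poll and the duration once it completes. The sketch below is an illustrative, stdlib-only reconstruction of that polling pattern, not the oslo.vmware implementation; the fetch_task_info callable, the state names and the 0.5s poll interval are assumptions made for the example.

```python
import logging
import time
from typing import Callable, Mapping

LOG = logging.getLogger(__name__)

# Hypothetical task states, mirroring the "progress is N%" and
# "completed successfully" messages in the log above; real vSphere
# TaskInfo states are fetched through the API and are not modelled here.
RUNNING, SUCCESS, ERROR = "running", "success", "error"


def wait_for_task(task_id: str,
                  fetch_task_info: Callable[[str], Mapping],
                  poll_interval: float = 0.5) -> Mapping:
    """Poll a task until it finishes, logging progress as it goes.

    `fetch_task_info` is an assumed helper returning a mapping with
    'state', 'progress' and optionally 'error' keys for the task.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        state = info.get("state", RUNNING)
        if state == RUNNING:
            LOG.debug("Task: %s progress is %s%%.",
                      task_id, info.get("progress", 0))
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if state == SUCCESS:
            LOG.debug("Task: %s completed successfully, duration_secs %.6f.",
                      task_id, duration)
            return info
        raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
```

Under those assumptions, `wait_for_task("task-4831261", fetch)` would produce the same 0% → 27% → 100% → "completed successfully" progression that the log records for the disk copy above.
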
[ 599.629499] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Creating folder: Instances. Parent ref: group-v941805. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.629499] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6572f662-3f90-443f-b53c-701749c0076f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.649091] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Created folder: Instances in parent group-v941805. [ 599.649537] env[62914]: DEBUG oslo.service.loopingcall [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.649792] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 599.651106] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb581acc-460d-4f67-9eb4-d2c2737e9cb6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.681430] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.681430] env[62914]: value = "task-4831264" [ 599.681430] env[62914]: _type = "Task" [ 599.681430] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.698720] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831264, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.731973] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 599.760626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.761177] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 599.768832] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.123s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.770751] env[62914]: INFO nova.compute.claims [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.794950] env[62914]: INFO nova.compute.manager [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] instance snapshotting [ 599.799222] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831261, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.801142] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64d5c45-e677-4130-9f48-b242261ede39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.827970] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83715917-0111-430d-8b4c-6196c3fd6aac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.201349] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831264, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.216648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.218247] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.268434] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.280518] env[62914]: DEBUG nova.compute.utils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.299126] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 600.299545] env[62914]: DEBUG nova.network.neutron [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 600.316421] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831261, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.724618} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.317209] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Copied Virtual Disk [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk to [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 600.317209] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleting the datastore file [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c/tmp-sparse.vmdk {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 600.317565] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75972ea7-b8f3-4e75-a8cd-3cb5a9943179 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.331728] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 600.331728] env[62914]: value = "task-4831265" [ 600.331728] env[62914]: _type = "Task" [ 600.331728] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.344518] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 600.344518] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2e0dae10-a8c7-48de-a108-ede122874f6e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.358038] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.360464] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 600.360464] env[62914]: value = "task-4831266" [ 600.360464] env[62914]: _type = "Task" [ 600.360464] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.383187] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831266, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.397696] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Received event network-vif-plugged-16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 600.398377] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Acquiring lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.398433] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.398680] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.398930] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] No waiting events found dispatching network-vif-plugged-16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 600.399233] env[62914]: WARNING nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Received unexpected event network-vif-plugged-16cf2714-36d7-443b-a820-1fe738d54164 for instance with vm_state building and task_state spawning. [ 600.399492] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Received event network-changed-16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 600.399743] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Refreshing instance network info cache due to event network-changed-16cf2714-36d7-443b-a820-1fe738d54164. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 600.400030] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Acquiring lock "refresh_cache-9e39cfb8-e277-4798-92b0-b54f310ef2f4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.400237] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Acquired lock "refresh_cache-9e39cfb8-e277-4798-92b0-b54f310ef2f4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.400460] env[62914]: DEBUG nova.network.neutron [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Refreshing network info cache for port 16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 600.443658] env[62914]: DEBUG nova.policy [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05f7416f7a244ee1b6507b5ac2b64ce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a83acb637b5c47f395d677ee48e37dae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 600.643327] env[62914]: DEBUG nova.compute.manager [req-f588a181-849f-4915-84ac-1387a17764b4 req-ed6a0bf8-d23e-4b4e-9f37-477adf6287f5 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Received event network-vif-plugged-cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 600.643692] env[62914]: DEBUG oslo_concurrency.lockutils [req-f588a181-849f-4915-84ac-1387a17764b4 req-ed6a0bf8-d23e-4b4e-9f37-477adf6287f5 service nova] Acquiring lock "e1018767-71e4-49c9-bd4d-02eae39dc26b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.643878] env[62914]: DEBUG oslo_concurrency.lockutils [req-f588a181-849f-4915-84ac-1387a17764b4 req-ed6a0bf8-d23e-4b4e-9f37-477adf6287f5 service nova] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.643939] env[62914]: DEBUG oslo_concurrency.lockutils [req-f588a181-849f-4915-84ac-1387a17764b4 req-ed6a0bf8-d23e-4b4e-9f37-477adf6287f5 service nova] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.644921] env[62914]: DEBUG nova.compute.manager [req-f588a181-849f-4915-84ac-1387a17764b4 
req-ed6a0bf8-d23e-4b4e-9f37-477adf6287f5 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] No waiting events found dispatching network-vif-plugged-cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 600.647772] env[62914]: WARNING nova.compute.manager [req-f588a181-849f-4915-84ac-1387a17764b4 req-ed6a0bf8-d23e-4b4e-9f37-477adf6287f5 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Received unexpected event network-vif-plugged-cf87f855-3a4c-43d5-a06f-db1eb5eec958 for instance with vm_state building and task_state spawning. [ 600.705808] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831264, 'name': CreateVM_Task, 'duration_secs': 0.876153} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.706298] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 600.707821] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.708237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.708649] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 600.709045] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b76845b-ae97-4131-b51b-7b63477d384c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.715843] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 600.715843] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c49b9-76e7-0834-10c4-d51b65a0601f" [ 600.715843] env[62914]: _type = "Task" [ 600.715843] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.721248] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 600.732910] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c49b9-76e7-0834-10c4-d51b65a0601f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.789969] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 600.861911] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.053182} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.867080] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 600.867080] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Moving file from [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479/75c43660-b52b-450e-ba36-0f721e14bc6c to [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c. {{(pid=62914) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 600.868372] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-40baea6e-7aad-48a2-a3ee-1caa6159ce1f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.877609] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831266, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.882365] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 600.882365] env[62914]: value = "task-4831267" [ 600.882365] env[62914]: _type = "Task" [ 600.882365] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.898101] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831267, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.155018] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5806194b-c98f-4b3e-98d9-2e33568e4aba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.164778] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067fc34f-95ce-4145-8645-7e0cf0276154 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.208861] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4535f46a-b289-402c-b365-d1d42421978f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.221413] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e19ca9-373c-4082-9f14-9b4133eef59f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.239581] env[62914]: DEBUG nova.compute.provider_tree [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.249617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.249617] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 601.249617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.267522] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.296244] env[62914]: INFO nova.virt.block_device [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Booting with volume 
713ab20a-101e-495b-8fb5-6ebb8c0e42dd at /dev/sda [ 601.394230] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831266, 'name': CreateSnapshot_Task, 'duration_secs': 0.955813} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.397654] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 601.398315] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0972f14a-b518-41b0-835b-f86141dbd714 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.406427] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eec5b50-1eb5-4687-a2ca-1c9d37c56c04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.410138] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831267, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.078404} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.410953] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] File moved {{(pid=62914) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 601.411346] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Cleaning up location [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 601.411691] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleting the datastore file [datastore1] vmware_temp/0e4615b3-934d-4537-a2cc-f0ff2cd8d479 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.412435] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00d711ec-2d68-46f5-b256-3f0e377b6b08 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.419882] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2053958-8e59-4ce5-a82e-0bc86cc6ed31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.440031] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 
tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 601.440031] env[62914]: value = "task-4831268" [ 601.440031] env[62914]: _type = "Task" [ 601.440031] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.452926] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831268, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.467867] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4911926e-a4dd-4f63-b3af-07ecb849470a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.473240] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Acquiring lock "52097338-887e-4c79-8413-abfd7ea26c96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.475911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lock "52097338-887e-4c79-8413-abfd7ea26c96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.475911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Acquiring lock "52097338-887e-4c79-8413-abfd7ea26c96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.475911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lock "52097338-887e-4c79-8413-abfd7ea26c96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.475911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lock "52097338-887e-4c79-8413-abfd7ea26c96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.478198] env[62914]: INFO nova.compute.manager [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 
52097338-887e-4c79-8413-abfd7ea26c96] Terminating instance [ 601.482945] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03db6c0-1048-4b94-9743-9aab11f547c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.499723] env[62914]: DEBUG nova.compute.manager [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 601.502039] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 601.503340] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2d3130-bcf2-4694-b01c-3cf24a66f7ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.512727] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 601.513120] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1901d5d-cdd0-4649-89f0-75c6599778bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.530832] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e944db-ef90-4dbc-97e8-f0c8a519248c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.533827] env[62914]: DEBUG oslo_vmware.api [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Waiting for the task: (returnval){ [ 601.533827] env[62914]: value = "task-4831269" [ 601.533827] env[62914]: _type = "Task" [ 601.533827] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.543710] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d4c1d0-0513-4b68-b732-f4a8700c0e1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.550147] env[62914]: DEBUG oslo_vmware.api [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Task: {'id': task-4831269, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.565265] env[62914]: DEBUG nova.virt.block_device [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updating existing volume attachment record: 783839bf-fb00-4fb0-a220-04047e7da454 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 601.587533] env[62914]: DEBUG nova.network.neutron [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Successfully updated port: 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 601.747770] env[62914]: DEBUG nova.scheduler.client.report [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.858009] env[62914]: DEBUG nova.network.neutron [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Successfully created port: fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.863641] env[62914]: DEBUG nova.network.neutron [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Updated VIF entry in instance network info cache for port 16cf2714-36d7-443b-a820-1fe738d54164. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 601.863999] env[62914]: DEBUG nova.network.neutron [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Updating instance_info_cache with network_info: [{"id": "16cf2714-36d7-443b-a820-1fe738d54164", "address": "fa:16:3e:89:33:54", "network": {"id": "19be32a9-21c0-4be1-9bc1-1faf46fd1cf6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1668507659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82c3284839f54f9bbaab4591a75b5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "674802e7-b847-4bef-a7a8-f90ac7a3a0a7", "external-id": "nsx-vlan-transportzone-953", "segmentation_id": 953, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16cf2714-36", "ovs_interfaceid": "16cf2714-36d7-443b-a820-1fe738d54164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.951139] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 601.955512] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-808f5b26-5251-457b-adae-deb461902334 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.964242] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045341} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.965466] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.969024] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 601.969024] env[62914]: value = "task-4831270" [ 601.969024] env[62914]: _type = "Task" [ 601.969024] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.969024] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc51f94d-3f84-4c75-9bd8-8ac28c26891f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.980540] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 601.980540] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b4cd8b-4fad-ae6d-3441-ef4326d5302c" [ 601.980540] env[62914]: _type = "Task" [ 601.980540] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.981180] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831270, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.989619] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b4cd8b-4fad-ae6d-3441-ef4326d5302c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.049369] env[62914]: DEBUG oslo_vmware.api [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Task: {'id': task-4831269, 'name': PowerOffVM_Task, 'duration_secs': 0.258521} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.049658] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 602.049812] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 602.050068] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de35ecc1-e709-4d8e-b98f-a10be503b529 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.090594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.090594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquired lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.090814] env[62914]: DEBUG nova.network.neutron [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 602.120459] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 602.120679] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 602.120958] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Deleting the datastore file [datastore2] 52097338-887e-4c79-8413-abfd7ea26c96 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 602.121691] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task 
with opID=oslo.vmware-93f39fcc-1827-47f0-93b9-c673978b4212 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.132765] env[62914]: DEBUG oslo_vmware.api [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Waiting for the task: (returnval){ [ 602.132765] env[62914]: value = "task-4831272" [ 602.132765] env[62914]: _type = "Task" [ 602.132765] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.142421] env[62914]: DEBUG oslo_vmware.api [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Task: {'id': task-4831272, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.255349] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.255832] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 602.259363] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.991s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.260751] env[62914]: INFO nova.compute.claims [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.371884] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Releasing lock "refresh_cache-9e39cfb8-e277-4798-92b0-b54f310ef2f4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.372208] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Received event network-vif-plugged-a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 602.372418] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.372640] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.372816] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.373037] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] No waiting events found dispatching network-vif-plugged-a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 602.373208] env[62914]: WARNING nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Received unexpected event network-vif-plugged-a37b3d57-45a7-4167-970b-4734a54661f8 for instance with vm_state building and task_state spawning. [ 602.373421] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Received event network-changed-a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 602.374068] env[62914]: DEBUG nova.compute.manager [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Refreshing instance network info cache due to event network-changed-a37b3d57-45a7-4167-970b-4734a54661f8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 602.374068] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Acquiring lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.374068] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Acquired lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.374265] env[62914]: DEBUG nova.network.neutron [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Refreshing network info cache for port a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.482815] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831270, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.498361] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b4cd8b-4fad-ae6d-3441-ef4326d5302c, 'name': SearchDatastore_Task, 'duration_secs': 0.022081} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.498649] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.498963] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 9e39cfb8-e277-4798-92b0-b54f310ef2f4/9e39cfb8-e277-4798-92b0-b54f310ef2f4.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 602.499542] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.499542] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.499665] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7b6dea8-cd0f-4c7f-930e-57d366e14aa7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.502405] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0adeadcd-a15f-4a7b-8294-0e8cf559b548 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.511183] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 602.511183] env[62914]: value = "task-4831273" [ 602.511183] env[62914]: _type = "Task" [ 602.511183] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.516016] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.516380] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 602.517693] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fbb1eaf-2dea-49cb-96f5-9ea386191a10 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.530807] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.535631] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 602.535631] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b3d354-3fc3-9dd4-ceef-a466edf9ff28" [ 602.535631] env[62914]: _type = "Task" [ 602.535631] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.546403] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b3d354-3fc3-9dd4-ceef-a466edf9ff28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.646249] env[62914]: DEBUG oslo_vmware.api [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Task: {'id': task-4831272, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187071} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.646557] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 602.646757] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 602.647231] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 602.647231] env[62914]: INFO nova.compute.manager [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Took 1.15 seconds to destroy the instance on the hypervisor. 
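The DeleteDatastoreFile_Task, SearchDatastore_Task, CopyVirtualDisk_Task and CloneVM_Task entries in this section all follow the same shape: the driver invokes a vCenter call that returns a task object, then polls it, logging "progress is N%" until it sees "completed successfully" together with a duration_secs value. A minimal, self-contained sketch of that polling loop follows; the TaskInfo tuple and the fetch_info callable are illustrative stand-ins rather than the actual oslo.vmware implementation (which drives its _poll_task method from a looping call inside wait_for_task, the source locations cited in the {{...}} tags above).

    import time
    from collections import namedtuple

    # Illustrative stand-in for vSphere's TaskInfo object (not the real type).
    TaskInfo = namedtuple('TaskInfo', ['state', 'progress', 'error'])

    def wait_for_task(fetch_info, task_id, poll_interval=0.5):
        """Poll task_id until it reaches a terminal state.

        fetch_info is a caller-supplied callable returning a TaskInfo.
        This mirrors the 'progress is N%' / 'completed successfully'
        log lines above, but is only a sketch of the real loop.
        """
        started = time.time()
        while True:
            info = fetch_info(task_id)
            if info.state in ('queued', 'running'):
                print("Task %s progress is %s%%" % (task_id, info.progress or 0))
                time.sleep(poll_interval)
            elif info.state == 'success':
                print("Task %s completed successfully. duration_secs=%.6f"
                      % (task_id, time.time() - started))
                return info
            else:  # 'error'
                raise RuntimeError("Task %s failed: %s" % (task_id, info.error))

    # Example usage with a fake task that reports 0% -> 94% -> success,
    # much like the CloneVM_Task entries above.
    progress_steps = iter([TaskInfo('running', 0, None),
                           TaskInfo('running', 94, None),
                           TaskInfo('success', 100, None)])
    wait_for_task(lambda task_id: next(progress_steps), 'task-4831270',
                  poll_interval=0.01)

In the running service this loop is not a bare while loop: the {{...}} tags show it living in oslo_vmware/api.py (wait_for_task and _poll_task), scheduled through oslo.service's looping-call machinery, with the same progress and duration logging.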
[ 602.647349] env[62914]: DEBUG oslo.service.loopingcall [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.647501] env[62914]: DEBUG nova.compute.manager [-] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 602.647588] env[62914]: DEBUG nova.network.neutron [-] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 602.684785] env[62914]: DEBUG nova.network.neutron [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.698567] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.698804] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.699068] env[62914]: INFO nova.compute.manager [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Rebooting instance [ 602.766283] env[62914]: DEBUG nova.compute.utils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 602.772607] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 602.773612] env[62914]: DEBUG nova.network.neutron [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 602.985708] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831270, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.027635] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831273, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.053214] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b3d354-3fc3-9dd4-ceef-a466edf9ff28, 'name': SearchDatastore_Task, 'duration_secs': 0.026278} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.054618] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6de1bc0-8d9b-4f3f-8cff-4efeb2564d43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.062053] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 603.062053] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52515bf9-a999-e00a-e498-8e6c871d48ee" [ 603.062053] env[62914]: _type = "Task" [ 603.062053] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.072665] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52515bf9-a999-e00a-e498-8e6c871d48ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.083835] env[62914]: DEBUG nova.policy [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16ea3ecf7b2448b2893258b3fa472397', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3705e459bbc244e2b1056a8219071ff5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.243699] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.243937] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquired lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.246341] env[62914]: DEBUG nova.network.neutron [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.272979] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 603.303447] env[62914]: DEBUG nova.network.neutron [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updating instance_info_cache with network_info: [{"id": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "address": "fa:16:3e:ec:dc:0a", "network": {"id": "ba7c830d-c4a1-418f-a747-d997a634b13f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1412355570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33e55050ffe94a588a5db112563b5555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e7a9bbc-61", "ovs_interfaceid": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.488691] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831270, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.530612] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.931559} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.531474] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 9e39cfb8-e277-4798-92b0-b54f310ef2f4/9e39cfb8-e277-4798-92b0-b54f310ef2f4.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 603.531474] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 603.531474] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc9f7492-7ec2-48fe-b077-1259ae1c2698 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.543784] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 603.543784] env[62914]: value = "task-4831274" [ 603.543784] env[62914]: _type = "Task" [ 603.543784] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.556203] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.587105] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52515bf9-a999-e00a-e498-8e6c871d48ee, 'name': SearchDatastore_Task, 'duration_secs': 0.066685} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.587688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.587942] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494/a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 603.588335] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.588502] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.588719] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48b17b6b-6292-44dd-b1c8-fa5472feb099 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.595028] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a57518f-cea6-45ac-994f-d77ca45f35ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.603902] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 603.603902] env[62914]: value = "task-4831275" [ 603.603902] env[62914]: _type = "Task" [ 603.603902] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.609919] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.610591] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 603.615796] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58db3f8e-ac89-4aee-baa6-622887c8c452 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.623371] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.627906] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 603.627906] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5200647e-a949-9a7d-465b-cde7b29ca225" [ 603.627906] env[62914]: _type = "Task" [ 603.627906] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.644341] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5200647e-a949-9a7d-465b-cde7b29ca225, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.736531] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2a269d-e312-480b-8e0f-bc74261872de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.749092] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbc18c6-6894-4705-b2ea-cd0de55e3e61 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.755334] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 603.755851] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.756070] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.756221] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.756400] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.756543] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.756689] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.756933] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.757063] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.757201] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] 
Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.757361] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.757527] env[62914]: DEBUG nova.virt.hardware [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.759232] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8991f1-544a-48e1-a075-789ef8028e5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.809931] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f47452-7e57-4c3c-97a4-e5c2420bcb04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.813503] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669a1ad2-380d-47d0-a05a-94be00e19b70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.819902] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Releasing lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.821790] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Instance network_info: |[{"id": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "address": "fa:16:3e:ec:dc:0a", "network": {"id": "ba7c830d-c4a1-418f-a747-d997a634b13f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1412355570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33e55050ffe94a588a5db112563b5555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e7a9bbc-61", "ovs_interfaceid": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 603.822212] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:dc:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e7a9bbc-61fa-4ecf-8142-2568ff07c25c', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.833350] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Creating folder: Project (33e55050ffe94a588a5db112563b5555). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.835139] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b844712-2750-4095-9e7c-d11caab9d450 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.855817] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e829a2d-bc18-49c4-8ae8-7f45c1bd5c4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.864834] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Created folder: Project (33e55050ffe94a588a5db112563b5555) in parent group-v941773. [ 603.865135] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Creating folder: Instances. Parent ref: group-v941810. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.875878] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-567ab40a-4e0f-4a46-9133-d53dabb95a79 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.878263] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 603.891710] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Created folder: Instances in parent group-v941810. [ 603.891710] env[62914]: DEBUG oslo.service.loopingcall [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.891710] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 603.892458] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c055f12f-7d34-4de1-8510-a4bf97f5b54b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.915984] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.915984] env[62914]: value = "task-4831278" [ 603.915984] env[62914]: _type = "Task" [ 603.915984] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.925614] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.946415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "43edad1f-cff0-4d3c-a721-98277d1cddc2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.946415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.946415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "43edad1f-cff0-4d3c-a721-98277d1cddc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.946415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.947670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.951025] env[62914]: INFO nova.compute.manager [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Terminating instance [ 603.952152] env[62914]: DEBUG nova.compute.manager [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 603.952273] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 603.953138] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb386c40-c20e-4f6a-898d-ccceb8d0de53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.961368] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 603.961668] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6632a4eb-0a1c-44d5-9503-1f83c2d7a828 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.969710] env[62914]: DEBUG oslo_vmware.api [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 603.969710] env[62914]: value = "task-4831279" [ 603.969710] env[62914]: _type = "Task" [ 603.969710] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.996058] env[62914]: DEBUG oslo_vmware.api [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.000108] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831270, 'name': CloneVM_Task, 'duration_secs': 1.754532} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.000108] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Created linked-clone VM from snapshot [ 604.000108] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac962a3-ec14-48e0-8a68-dc4e59f6aed4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.016678] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Uploading image 84649d91-ae10-4d24-9e47-5d49bd26b4b1 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 604.061403] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.305414} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.063258] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 604.064232] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3007fa25-263c-4093-83f5-49192659ce94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.094117] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 9e39cfb8-e277-4798-92b0-b54f310ef2f4/9e39cfb8-e277-4798-92b0-b54f310ef2f4.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 604.098923] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 604.098923] env[62914]: value = "vm-941809" [ 604.098923] env[62914]: _type = "VirtualMachine" [ 604.098923] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 604.099182] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1c8ccd3-f668-41f2-9aa3-ba19f42d01cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.114366] env[62914]: DEBUG nova.network.neutron [-] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.117086] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-33fd24ca-e4e8-45c5-9b60-62fc37929aea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.133315] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831275, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.142187] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lease: (returnval){ [ 604.142187] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b7866-a520-c6e4-e005-e9b07d83b7ee" [ 604.142187] env[62914]: _type = "HttpNfcLease" [ 604.142187] env[62914]: } obtained for exporting VM: (result){ [ 604.142187] env[62914]: value = "vm-941809" [ 604.142187] env[62914]: _type = "VirtualMachine" [ 604.142187] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 604.146722] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the lease: (returnval){ [ 604.146722] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b7866-a520-c6e4-e005-e9b07d83b7ee" [ 604.146722] env[62914]: _type = "HttpNfcLease" [ 604.146722] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 604.146722] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 604.146722] env[62914]: value = "task-4831281" [ 604.146722] env[62914]: _type = "Task" [ 604.146722] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.155951] env[62914]: DEBUG nova.network.neutron [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updated VIF entry in instance network info cache for port a37b3d57-45a7-4167-970b-4734a54661f8. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 604.156371] env[62914]: DEBUG nova.network.neutron [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updating instance_info_cache with network_info: [{"id": "a37b3d57-45a7-4167-970b-4734a54661f8", "address": "fa:16:3e:85:44:f6", "network": {"id": "3cc69eb5-cd59-4351-8a69-68f647db0af1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-590356650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfac7a5b4e7349688942cac59bd2adfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa37b3d57-45", "ovs_interfaceid": "a37b3d57-45a7-4167-970b-4734a54661f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.157779] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5200647e-a949-9a7d-465b-cde7b29ca225, 'name': SearchDatastore_Task, 'duration_secs': 0.018934} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.160472] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2f2bd7-a253-4a5d-bc02-bb0a11069d0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.170275] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.170275] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 604.170275] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b7866-a520-c6e4-e005-e9b07d83b7ee" [ 604.170275] env[62914]: _type = "HttpNfcLease" [ 604.170275] env[62914]: } is ready. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 604.170275] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 604.170275] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b7866-a520-c6e4-e005-e9b07d83b7ee" [ 604.170275] env[62914]: _type = "HttpNfcLease" [ 604.170275] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 604.174034] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799d8394-493f-41ee-8c9c-49317e3b3d86 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.178022] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 604.178022] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52adc764-1d78-46f9-2021-428f9fafa5aa" [ 604.178022] env[62914]: _type = "Task" [ 604.178022] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.190606] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9f08c-d563-2041-6f11-7c8fe76594ac/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 604.190788] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9f08c-d563-2041-6f11-7c8fe76594ac/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 604.264843] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52adc764-1d78-46f9-2021-428f9fafa5aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.308624] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 604.323766] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-29671d5b-f20b-47f2-95d3-241ff0127493 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.365901] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=<?>,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-25T11:20:45Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 604.365901] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 604.366281] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.366281] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 604.370121] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.370121] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 604.370121] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 604.370121] env[62914]: DEBUG nova.virt.hardware [None 
req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 604.370121] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 604.370357] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 604.370357] env[62914]: DEBUG nova.virt.hardware [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.370357] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e8d95e-1105-4594-ab22-9b2818a537c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.392800] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb4d360-b85d-41b8-a976-6948ef3475b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.424817] env[62914]: ERROR nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [req-d5c721d1-fd38-4970-8db5-d15f008ca895] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f2f7a014-852b-4b37-9610-c5761f4b0175. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d5c721d1-fd38-4970-8db5-d15f008ca895"}]} [ 604.434533] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.456277] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 604.486255] env[62914]: DEBUG oslo_vmware.api [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831279, 'name': PowerOffVM_Task, 'duration_secs': 0.450868} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.488855] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 604.488855] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 604.492385] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 604.492575] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 604.493805] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d4a392b-d5f4-4a0b-9efe-f36b91365932 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.510337] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 
tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 604.548661] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 604.581980] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 604.582196] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 604.582371] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleting the datastore file [datastore2] 43edad1f-cff0-4d3c-a721-98277d1cddc2 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 604.582634] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d51fdca9-9d0e-47d5-a26e-24d16140ecaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.596429] env[62914]: DEBUG oslo_vmware.api [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for the task: (returnval){ [ 604.596429] env[62914]: value = "task-4831283" [ 604.596429] env[62914]: _type = "Task" [ 604.596429] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.609907] env[62914]: DEBUG oslo_vmware.api [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.620639] env[62914]: INFO nova.compute.manager [-] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Took 1.97 seconds to deallocate network for instance. 
[ 604.669325] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808184} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.669757] env[62914]: DEBUG oslo_concurrency.lockutils [req-e63d7b8e-9a91-47ee-8bc2-65e0c4788924 req-425058a9-bebf-42d3-9f96-5737a2e30123 service nova] Releasing lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.672888] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494/a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 604.673155] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 604.676018] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-625c13a3-f338-4be2-8f82-17083de2c5d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.683058] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.699235] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 604.699235] env[62914]: value = "task-4831284" [ 604.699235] env[62914]: _type = "Task" [ 604.699235] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.710304] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52adc764-1d78-46f9-2021-428f9fafa5aa, 'name': SearchDatastore_Task, 'duration_secs': 0.06118} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.711535] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.712343] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] e1018767-71e4-49c9-bd4d-02eae39dc26b/e1018767-71e4-49c9-bd4d-02eae39dc26b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 604.712804] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-608c76a5-aa0a-464d-972f-e75cb458f6e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.719631] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.738904] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 604.738904] env[62914]: value = "task-4831285" [ 604.738904] env[62914]: _type = "Task" [ 604.738904] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.750408] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.815743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.815969] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.911589] env[62914]: DEBUG nova.network.neutron [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Updating instance_info_cache with network_info: [{"id": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "address": "fa:16:3e:c4:ab:b0", "network": {"id": "8a08ae7c-cab2-44bd-9284-52c0337d5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-638020922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61536f203aa643608e7cca4cb14723d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a9ccc3c-c2", "ovs_interfaceid": "7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.948277] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.974156] env[62914]: DEBUG nova.compute.manager [None req-d086dd67-e16a-4334-b3bc-907d86112c4e tempest-ServerDiagnosticsTest-1960678720 tempest-ServerDiagnosticsTest-1960678720-project-admin] [instance: db31a794-3928-41bb-afd8-14fae9357654] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 604.975592] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3359c9a9-5f24-4a12-b7c4-46a20b6d94e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.993501] env[62914]: INFO nova.compute.manager [None req-d086dd67-e16a-4334-b3bc-907d86112c4e tempest-ServerDiagnosticsTest-1960678720 tempest-ServerDiagnosticsTest-1960678720-project-admin] [instance: db31a794-3928-41bb-afd8-14fae9357654] Retrieving diagnostics [ 604.994724] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beeb1f62-0913-4d50-9e47-7ee5e9fb9b53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.100062] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a198f55-abcf-4070-a065-2573accfbc7f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.125245] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4aed53-77f4-4a16-86a8-679c96467e58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.130664] env[62914]: DEBUG oslo_vmware.api [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Task: {'id': task-4831283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174587} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.131109] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 605.131613] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 605.131915] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 605.132223] env[62914]: INFO nova.compute.manager [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Took 1.18 seconds to destroy the instance on the hypervisor. [ 605.132595] env[62914]: DEBUG oslo.service.loopingcall [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.133575] env[62914]: DEBUG nova.compute.manager [-] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 605.133831] env[62914]: DEBUG nova.network.neutron [-] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 605.168128] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.173941] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ba84c-d80d-41f4-ba3c-11ad3c8fdd82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.183545] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.187393] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0191b6-9b56-4c89-bcd2-2687e97abec1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.204416] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.218845] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.258732] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.325493] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 605.395757] env[62914]: DEBUG nova.network.neutron [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Successfully created port: b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.420212] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Releasing lock "refresh_cache-61e36e7b-aaa1-420e-bd43-f0184b56581b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.425436] env[62914]: DEBUG nova.compute.manager [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 605.427573] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37a5daf-771a-4699-86df-3de7550a1ed3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.448819] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.608573] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.609966] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.664709] env[62914]: DEBUG nova.network.neutron [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Successfully updated port: fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.692904] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.740058] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.755222] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.756342] env[62914]: ERROR nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [req-05a05980-0aca-4138-85e5-c5f3450f17c5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f2f7a014-852b-4b37-9610-c5761f4b0175. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-05a05980-0aca-4138-85e5-c5f3450f17c5"}]} [ 605.781512] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 605.798826] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 605.799078] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 21 to 22 during operation: update_inventory {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 605.799275] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 
tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.815488] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 605.838254] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 605.850111] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.940702] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.119838] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 606.173056] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquiring lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.173056] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquired lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.173056] env[62914]: DEBUG nova.network.neutron [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 606.212681] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.222481] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.261935] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.324467] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2692fe0a-02cc-4779-b888-2198ae594514 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.338395] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755924ef-d912-428d-920e-6b38dced89a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.347491] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "ff2cff97-1671-4f97-8f69-532253169ff8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.347822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "ff2cff97-1671-4f97-8f69-532253169ff8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.387185] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3aaf56e-9bb9-4656-817b-64399dbb2ced {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.399079] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f479d5-fa24-4e5b-a520-0a2409cbcd68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.416945] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 606.441124] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.463485] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cffb4d3-7f0a-46b3-aa4b-72ca88948128 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.474738] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Doing hard reboot of VM {{(pid=62914) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 606.475950] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0e86a8b7-f6e4-4be5-bd73-a19b5c657b92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.491463] env[62914]: DEBUG oslo_vmware.api [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 606.491463] env[62914]: value = "task-4831286" [ 606.491463] env[62914]: _type = "Task" [ 606.491463] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.504866] env[62914]: DEBUG oslo_vmware.api [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831286, 'name': ResetVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.663027] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.693042] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.723321] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.932706} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.723631] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 606.724506] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f788bf9e-fe48-4fcd-a8f6-0448e6348ba4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.753057] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494/a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 606.756697] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6bd33ce-a893-4639-a747-0a49fb3ce476 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.775308] env[62914]: DEBUG nova.network.neutron [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.784034] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.785943] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 606.785943] env[62914]: value = "task-4831287" [ 606.785943] env[62914]: _type = "Task" [ 606.785943] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.797089] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831287, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.943092] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831278, 'name': CreateVM_Task, 'duration_secs': 2.893792} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.943535] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 606.944148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.944559] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.945138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 606.945489] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2fb5ba-c5af-4c50-bee9-7def2777e1e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.953669] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 606.953669] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5235eb7e-340e-86bd-8b7f-ea4146c58635" [ 606.953669] env[62914]: _type = "Task" [ 606.953669] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.969487] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5235eb7e-340e-86bd-8b7f-ea4146c58635, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.987047] env[62914]: DEBUG nova.scheduler.client.report [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updated inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with generation 22 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 606.987358] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 22 to 23 during operation: update_inventory {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 606.987534] env[62914]: DEBUG nova.compute.provider_tree [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 607.009221] env[62914]: DEBUG oslo_vmware.api [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831286, 'name': ResetVM_Task, 'duration_secs': 0.114647} completed successfully. 
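The inventory that req-d32333a2 pushes for provider f2f7a014-852b-4b37-9610-c5761f4b0175 follows the usual Placement record shape: per resource class, a total, a reserved amount, min/max/step unit sizes and an allocation_ratio. Below is a minimal sketch of how usable capacity is conventionally derived from such a record, assuming the standard (total - reserved) * allocation_ratio rule; the helper is illustrative, not Nova's implementation.

# Illustrative only: derive usable capacity from a Placement-style inventory
# record like the one logged for provider f2f7a014-852b-4b37-9610-c5761f4b0175.
# The (total - reserved) * allocation_ratio rule is the usual Placement
# convention; treat it as an assumption, not a quote of Nova's code.

INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "min_unit": 1, "max_unit": 95,
                "step_size": 1, "allocation_ratio": 1.0},
}


def effective_capacity(record: dict) -> int:
    """Capacity the scheduler can allocate against for one resource class."""
    return int((record["total"] - record["reserved"]) * record["allocation_ratio"])


if __name__ == "__main__":
    for rc, rec in INVENTORY.items():
        print(f"{rc}: {effective_capacity(rec)} allocatable "
              f"(max {rec['max_unit']} per single allocation)")

With the logged values this yields 192 allocatable VCPUs against 48 physical ones, which is what the 4.0 CPU allocation ratio provides for.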
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.009578] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Did hard reboot of VM {{(pid=62914) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 607.009755] env[62914]: DEBUG nova.compute.manager [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 607.010673] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5ac438-dcdb-4f18-a42e-194b7cfc3aea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.099155] env[62914]: DEBUG nova.network.neutron [-] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.194760] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831281, 'name': ReconfigVM_Task, 'duration_secs': 2.711935} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.198692] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 9e39cfb8-e277-4798-92b0-b54f310ef2f4/9e39cfb8-e277-4798-92b0-b54f310ef2f4.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.198692] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbb073f7-4e63-4172-9abd-da4bd06c4ac6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.205155] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 607.205155] env[62914]: value = "task-4831288" [ 607.205155] env[62914]: _type = "Task" [ 607.205155] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.218480] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831288, 'name': Rename_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.257137] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.263090] env[62914]: DEBUG nova.network.neutron [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updating instance_info_cache with network_info: [{"id": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "address": "fa:16:3e:2f:34:dc", "network": {"id": "e8c1428c-2477-4bff-b70c-eb461db96ea5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-779065668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a83acb637b5c47f395d677ee48e37dae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7b353a-56", "ovs_interfaceid": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.302187] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831287, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.469674] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5235eb7e-340e-86bd-8b7f-ea4146c58635, 'name': SearchDatastore_Task, 'duration_secs': 0.055527} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.471983] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.471983] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.471983] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.471983] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.473118] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 607.473118] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61e02832-fa8a-4385-9dcc-cc28e0b11237 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.486867] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 607.487138] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 607.488732] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d68bcb73-12d9-4246-8d01-0c91e001d46d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.498894] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.240s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.505969] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 607.513753] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 607.513753] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525f4944-6ec0-b028-643c-c76dc4ef2037" [ 607.513753] env[62914]: _type = "Task" [ 607.513753] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.514077] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.246s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.516987] env[62914]: INFO nova.compute.claims [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.535192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5e047c-10ae-4383-80b7-f004231270d4 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.836s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.547863] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525f4944-6ec0-b028-643c-c76dc4ef2037, 'name': SearchDatastore_Task, 'duration_secs': 0.017246} completed successfully. 
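The lock bookkeeping throughout this trace ("Acquiring lock ... by ...", "acquired ... waited", '"released" ... held') is oslo.concurrency's lockutils at work, serializing builds per instance UUID and resource-tracker updates on "compute_resources". A minimal sketch of the same pattern using the public lockutils.lock() context manager follows, assuming oslo.concurrency is installed; the lock name and the timing printout are illustrative only, not Nova code.

# Sketch of the lock pattern behind the "Acquiring lock ... / acquired ...
# waited / released ... held" lines. Uses the public oslo.concurrency API;
# the lock name and timing output are illustrative only.
import time

from oslo_concurrency import lockutils


def claim_resources(instance_uuid: str) -> None:
    start = time.monotonic()
    # Serializes with any other code path taking the same named lock, the way
    # the resource tracker serializes claims on "compute_resources".
    with lockutils.lock(f"instance-{instance_uuid}"):
        waited = time.monotonic() - start
        print(f"lock acquired after waiting {waited:.3f}s")
        time.sleep(0.01)  # stand-in for the claim/build work done under the lock
    print("lock released")


if __name__ == "__main__":
    claim_resources("ff2cff97-1671-4f97-8f69-532253169ff8")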
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.548728] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2264cc6-9090-4575-9f13-40f897078bb4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.557228] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 607.557228] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10460-ef20-ca67-3048-8bdd48ea119a" [ 607.557228] env[62914]: _type = "Task" [ 607.557228] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.568950] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10460-ef20-ca67-3048-8bdd48ea119a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.607459] env[62914]: INFO nova.compute.manager [-] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Took 2.47 seconds to deallocate network for instance. [ 607.718391] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831288, 'name': Rename_Task, 'duration_secs': 0.286357} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.718709] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 607.718969] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5491d49d-c1b4-4145-8b20-e6d708c624b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.727372] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 607.727372] env[62914]: value = "task-4831289" [ 607.727372] env[62914]: _type = "Task" [ 607.727372] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.737811] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831289, 'name': PowerOnVM_Task} progress is 0%. 
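Every "Task: {...} progress is N%" / "completed successfully" pair above is the wait_for_task/_poll_task loop polling a vCenter task until it finishes. The following self-contained sketch shows that poll-until-done shape; get_progress and poll_interval are hypothetical stand-ins for the real task query, not the oslo.vmware implementation.

# Generic poll-until-done loop in the spirit of the wait_for_task /
# _poll_task lines above. `get_progress` is a hypothetical callable standing
# in for a vCenter task query; this is not oslo.vmware's implementation.
import itertools
import time
from typing import Callable


def wait_for_task(get_progress: Callable[[], int], poll_interval: float = 0.5) -> None:
    for attempt in itertools.count(1):
        progress = get_progress()
        print(f"poll {attempt}: progress is {progress}%")
        if progress >= 100:
            print("completed successfully")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Progress values mimic ones seen in the trace (0%, 14%, 89%, done).
    fake = iter([0, 14, 89, 100])
    wait_for_task(lambda: next(fake), poll_interval=0.0)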
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.756364] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831285, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.558111} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.756364] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] e1018767-71e4-49c9-bd4d-02eae39dc26b/e1018767-71e4-49c9-bd4d-02eae39dc26b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 607.756364] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 607.756364] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5953f669-b13e-4e9a-ac6a-9ba6ab83b6c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.764991] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 607.764991] env[62914]: value = "task-4831290" [ 607.764991] env[62914]: _type = "Task" [ 607.764991] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.765548] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Releasing lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.766101] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Instance network_info: |[{"id": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "address": "fa:16:3e:2f:34:dc", "network": {"id": "e8c1428c-2477-4bff-b70c-eb461db96ea5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-779065668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a83acb637b5c47f395d677ee48e37dae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7b353a-56", "ovs_interfaceid": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 607.770691] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:34:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc7b353a-564b-4bbe-b0e1-85f5f54f7092', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.779657] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Creating folder: Project (a83acb637b5c47f395d677ee48e37dae). Parent ref: group-v941773. 
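The network_info blob cached for e69c36e9-3c59-48e3-9962-ffe8de10a789 is then boiled down to the much smaller "Instance VIF info" list (network_name, mac_address, an OpaqueNetwork ref keyed by the NSX logical-switch id, iface_id, vif_model). Below is a sketch of that reduction over the same dict shape; the input is trimmed from the logged structure and the function is illustrative, not the vmwareapi driver's code.

# Illustrative reduction of a Neutron network_info entry (as cached above)
# into the flat VIF-info shape logged by the vmwareapi driver. The input
# mirrors the logged dict, trimmed; the function is a sketch, not Nova's code.

NETWORK_INFO = [{
    "id": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092",
    "address": "fa:16:3e:2f:34:dc",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e"},
    "vnic_type": "normal",
}]


def to_vif_info(vif: dict) -> dict:
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",  # model reported for this VIF in the trace
    }


if __name__ == "__main__":
    print([to_vif_info(v) for v in NETWORK_INFO])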
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 607.780313] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3f87b6a-ccf0-4490-a4d5-086b4401d6ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.788729] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831290, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.794485] env[62914]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 607.794693] env[62914]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62914) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 607.795076] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Folder already exists: Project (a83acb637b5c47f395d677ee48e37dae). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.795302] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Creating folder: Instances. Parent ref: group-v941774. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 607.795970] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fe35cb8-cdd7-4044-bdd2-be8e06edb367 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.801980] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831287, 'name': ReconfigVM_Task, 'duration_secs': 0.823725} completed successfully. 
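The SOAP fault at 607.794 (Fault list: [DuplicateName]) is treated as success one entry later ("Folder already exists"), i.e. folder creation is idempotent: a duplicate-name fault means the folder is already there and can be reused. A hedged sketch of that handling follows; it assumes oslo_vmware.exceptions exposes a DuplicateName fault class matching the logged fault name, and create_folder_once is a hypothetical stand-in for the actual Folder.CreateFolder call.

# Idempotent "create folder" in the spirit of the DuplicateName handling
# above. Assumes oslo_vmware.exceptions exposes a DuplicateName fault class
# (as the logged fault name suggests); create_folder_once is a hypothetical
# helper standing in for the real CreateFolder invocation.
from oslo_vmware import exceptions as vexc


def ensure_folder(create_folder_once, name: str):
    try:
        return create_folder_once(name)
    except vexc.DuplicateName:
        # vCenter reports the folder already exists: that is the desired end
        # state, so reuse it instead of failing the build.
        print(f"Folder already exists: {name}")
        return None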
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.802558] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Reconfigured VM instance instance-00000009 to attach disk [datastore1] a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494/a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.803262] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d97e9464-435b-4ae0-b0ad-48cee3b3e37c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.811858] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 607.811858] env[62914]: value = "task-4831293" [ 607.811858] env[62914]: _type = "Task" [ 607.811858] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.813618] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Created folder: Instances in parent group-v941774. [ 607.813889] env[62914]: DEBUG oslo.service.loopingcall [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.817337] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 607.817608] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08664360-b87f-4faf-99bc-7c03b9c84fda {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.840179] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831293, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.841593] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.841593] env[62914]: value = "task-4831294" [ 607.841593] env[62914]: _type = "Task" [ 607.841593] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.852358] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831294, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.912511] env[62914]: DEBUG nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Received event network-changed-cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 607.912843] env[62914]: DEBUG nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Refreshing instance network info cache due to event network-changed-cf87f855-3a4c-43d5-a06f-db1eb5eec958. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 607.913195] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Acquiring lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.913450] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Acquired lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.913723] env[62914]: DEBUG nova.network.neutron [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Refreshing network info cache for port cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 608.024632] env[62914]: DEBUG nova.compute.utils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 608.026510] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 608.026748] env[62914]: DEBUG nova.network.neutron [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 608.076476] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10460-ef20-ca67-3048-8bdd48ea119a, 'name': SearchDatastore_Task, 'duration_secs': 0.01876} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.076712] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.076984] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb/2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 608.077284] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-614c1322-168d-4006-94e4-2aad130dcbf6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.089117] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 608.089117] env[62914]: value = "task-4831295" [ 608.089117] env[62914]: _type = "Task" [ 608.089117] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.099069] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831295, 'name': CopyVirtualDisk_Task} progress is 0%. 
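The CopyVirtualDisk_Task above copies the cached image vmdk under devstack-image-cache_base to a per-instance path on the same datastore. A small sketch of how those two datastore paths are composed, using the values from the trace; this is plain string handling for illustration, not Nova's ds_util code.

# Sketch of how the copy source/target paths in the CopyVirtualDisk_Task
# lines are composed: cached image vmdk -> per-instance vmdk on the same
# datastore. Purely illustrative; not Nova's ds_util implementation.

def cache_image_path(datastore: str, image_id: str) -> str:
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"


def instance_disk_path(datastore: str, instance_uuid: str) -> str:
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


if __name__ == "__main__":
    src = cache_image_path("datastore1", "75c43660-b52b-450e-ba36-0f721e14bc6c")
    dst = instance_disk_path("datastore1", "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb")
    print(f"copy {src} -> {dst}")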
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.115834] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.123207] env[62914]: DEBUG nova.policy [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 608.204508] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.204950] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.241388] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831289, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.277725] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831290, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124213} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.278285] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 608.280434] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acdbf35-10d0-4c50-92a9-fdf2ee19021b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.314879] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] e1018767-71e4-49c9-bd4d-02eae39dc26b/e1018767-71e4-49c9-bd4d-02eae39dc26b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 608.314879] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cbb611c-30cf-4a8e-a158-3e7424a8971e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.346254] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831293, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.353448] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 608.353448] env[62914]: value = "task-4831296" [ 608.353448] env[62914]: _type = "Task" [ 608.353448] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.363457] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831294, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.372036] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831296, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.456012] env[62914]: DEBUG nova.compute.manager [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 608.530285] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 608.604527] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831295, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.686299] env[62914]: DEBUG nova.network.neutron [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Successfully updated port: b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 608.750411] env[62914]: DEBUG oslo_vmware.api [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831289, 'name': PowerOnVM_Task, 'duration_secs': 0.773697} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.750735] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 608.750937] env[62914]: INFO nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Took 20.52 seconds to spawn the instance on the hypervisor. 
[ 608.751138] env[62914]: DEBUG nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 608.752096] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b31f94-039a-4d26-a470-8215a31f68bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.786433] env[62914]: DEBUG nova.network.neutron [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Updated VIF entry in instance network info cache for port cf87f855-3a4c-43d5-a06f-db1eb5eec958. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 608.787065] env[62914]: DEBUG nova.network.neutron [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Updating instance_info_cache with network_info: [{"id": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "address": "fa:16:3e:6b:7c:d2", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf87f855-3a", "ovs_interfaceid": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.826648] env[62914]: DEBUG nova.compute.manager [req-644ae72c-8d21-4f06-8dbd-ba5c4baba277 req-0d966bd5-0a22-4d1f-b93e-c3f052fcfcdc service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Received event network-vif-plugged-fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 608.826928] env[62914]: DEBUG oslo_concurrency.lockutils [req-644ae72c-8d21-4f06-8dbd-ba5c4baba277 req-0d966bd5-0a22-4d1f-b93e-c3f052fcfcdc service nova] Acquiring lock "e69c36e9-3c59-48e3-9962-ffe8de10a789-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.827196] env[62914]: DEBUG oslo_concurrency.lockutils [req-644ae72c-8d21-4f06-8dbd-ba5c4baba277 req-0d966bd5-0a22-4d1f-b93e-c3f052fcfcdc service nova] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.827427] env[62914]: DEBUG oslo_concurrency.lockutils [req-644ae72c-8d21-4f06-8dbd-ba5c4baba277 req-0d966bd5-0a22-4d1f-b93e-c3f052fcfcdc service nova] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.827639] env[62914]: DEBUG nova.compute.manager [req-644ae72c-8d21-4f06-8dbd-ba5c4baba277 req-0d966bd5-0a22-4d1f-b93e-c3f052fcfcdc service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] No waiting events found dispatching network-vif-plugged-fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 608.827814] env[62914]: WARNING nova.compute.manager [req-644ae72c-8d21-4f06-8dbd-ba5c4baba277 req-0d966bd5-0a22-4d1f-b93e-c3f052fcfcdc service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Received unexpected event network-vif-plugged-fc7b353a-564b-4bbe-b0e1-85f5f54f7092 for instance with vm_state building and task_state spawning. [ 608.844048] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831293, 'name': Rename_Task, 'duration_secs': 0.992413} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.844215] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 608.844442] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bef329c4-62bd-4609-8236-8512bdb47328 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.858894] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 608.858894] env[62914]: value = "task-4831297" [ 608.858894] env[62914]: _type = "Task" [ 608.858894] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.862151] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831294, 'name': CreateVM_Task, 'duration_secs': 0.529513} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.867923] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 608.868603] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.871687] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': '783839bf-fb00-4fb0-a220-04047e7da454', 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941785', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'name': 'volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e69c36e9-3c59-48e3-9962-ffe8de10a789', 'attached_at': '', 'detached_at': '', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'serial': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd'}, 'delete_on_termination': True, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62914) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 608.871852] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Root volume attach. Driver type: vmdk {{(pid=62914) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 608.873328] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d469e08-3c92-4bc5-8392-9bc7eab67ac9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.879523] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 0%. 
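The "Block device information present" dump for the boot-from-volume instance carries everything the following "Root volume attach. Driver type: vmdk" step needs: the vmdk connection_info with the backing volume reference (vm-941785), the volume id and the access mode. Below is a sketch that pulls those fields out of the same structure; the dict literal is trimmed from the logged one and the function is illustrative, not Nova's code.

# Illustrative extraction of the vmdk connection data used for the
# "Root volume attach. Driver type: vmdk" step. The structure mirrors the
# logged block_device_mapping (trimmed); the function is a sketch only.

BLOCK_DEVICE_INFO = {
    "root_device_name": "/dev/sda",
    "block_device_mapping": [{
        "mount_device": "/dev/sda",
        "boot_index": 0,
        "connection_info": {
            "driver_volume_type": "vmdk",
            "data": {
                "volume": "vm-941785",
                "volume_id": "713ab20a-101e-495b-8fb5-6ebb8c0e42dd",
                "access_mode": "rw",
            },
        },
        "delete_on_termination": True,
    }],
}


def root_volume_connection(bdi: dict) -> dict:
    root = bdi["root_device_name"]
    for bdm in bdi["block_device_mapping"]:
        if bdm["mount_device"] == root:
            return bdm["connection_info"]["data"]
    raise LookupError(f"no mapping for root device {root}")


if __name__ == "__main__":
    print(root_volume_connection(BLOCK_DEVICE_INFO))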
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.886508] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f440fe1b-8fff-47da-8fad-806e54944c7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.894691] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba28f7d-ddc4-4979-8da9-a9161199bfc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.903355] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-370286d9-5e8d-4c41-bce8-b4023f70af75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.915943] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 608.915943] env[62914]: value = "task-4831298" [ 608.915943] env[62914]: _type = "Task" [ 608.915943] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.925588] env[62914]: DEBUG nova.network.neutron [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Successfully created port: b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.933364] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.975488] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b285cd61-5346-4fee-919a-88ea1a01406a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.986541] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.988417] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafd2df9-712a-494a-aed0-d95dc2a4798e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.026463] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496f0a84-49fe-4e07-bf22-f5ead00722dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.036936] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9e786b-03bc-4b46-847d-97588586f928 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.061202] env[62914]: DEBUG nova.compute.provider_tree [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.106941] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831295, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744039} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.106941] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb/2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 609.107212] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.110699] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40aa48f5-cb9c-4831-9daf-787e2100cc53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.116231] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 609.116231] env[62914]: value = "task-4831299" [ 609.116231] env[62914]: _type = "Task" [ 609.116231] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.126266] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831299, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.193121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.193121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquired lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.193121] env[62914]: DEBUG nova.network.neutron [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 609.273964] env[62914]: INFO nova.compute.manager [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Took 26.10 seconds to build instance. [ 609.290584] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Releasing lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.290584] env[62914]: DEBUG nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Received event network-vif-plugged-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 609.290992] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Acquiring lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.290992] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.291128] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.291314] env[62914]: DEBUG nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 
req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] No waiting events found dispatching network-vif-plugged-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 609.291504] env[62914]: WARNING nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Received unexpected event network-vif-plugged-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c for instance with vm_state building and task_state spawning. [ 609.291694] env[62914]: DEBUG nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Received event network-changed-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 609.291874] env[62914]: DEBUG nova.compute.manager [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Refreshing instance network info cache due to event network-changed-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 609.292899] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Acquiring lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.292899] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Acquired lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.292899] env[62914]: DEBUG nova.network.neutron [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Refreshing network info cache for port 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 609.372961] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831296, 'name': ReconfigVM_Task, 'duration_secs': 0.960756} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.373748] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Reconfigured VM instance instance-0000000a to attach disk [datastore1] e1018767-71e4-49c9-bd4d-02eae39dc26b/e1018767-71e4-49c9-bd4d-02eae39dc26b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 609.374938] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d77456f-af12-4a95-b6b1-9747a50d10f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.383281] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.393451] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 609.393451] env[62914]: value = "task-4831300" [ 609.393451] env[62914]: _type = "Task" [ 609.393451] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.403545] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831300, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.429371] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "db31a794-3928-41bb-afd8-14fae9357654" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.429371] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "db31a794-3928-41bb-afd8-14fae9357654" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.429371] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "db31a794-3928-41bb-afd8-14fae9357654-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.429371] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "db31a794-3928-41bb-afd8-14fae9357654-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.429724] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "db31a794-3928-41bb-afd8-14fae9357654-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.433756] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 22%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.434198] env[62914]: INFO nova.compute.manager [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Terminating instance [ 609.439281] env[62914]: DEBUG nova.compute.manager [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 609.439548] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 609.440737] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3101110a-eb47-4d65-9148-b0b038cbe930 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.449945] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 609.450912] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e3e7be8-4ab5-4ba7-9c37-076841049d16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.458954] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 609.458954] env[62914]: value = "task-4831301" [ 609.458954] env[62914]: _type = "Task" [ 609.458954] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.471023] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.551697] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 609.567024] env[62914]: DEBUG nova.scheduler.client.report [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.631986] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127993} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.632291] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.633207] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02165e68-7234-48e1-9586-3eba41f08cbd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.665107] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb/2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.667919] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 609.668196] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 609.668454] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.668665] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 609.668820] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.668972] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 609.669213] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 609.669426] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 609.669619] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 609.669783] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 609.669955] env[62914]: DEBUG nova.virt.hardware [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 609.671349] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-25d195e3-4a6e-4930-b8e1-12c8283e8344 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.687044] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d99a26-9ad2-4b56-a5e6-f1f5be65006d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.702813] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c411d4f5-a9d0-415a-80f3-525206bf04f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.708426] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 609.708426] env[62914]: value = "task-4831302" [ 609.708426] env[62914]: _type = "Task" [ 609.708426] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.728296] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.748729] env[62914]: DEBUG nova.network.neutron [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.779019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fbb15ce3-d1b4-4098-99ad-7e0003223c68 tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.620s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.878665] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.908375] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831300, 'name': Rename_Task, 'duration_secs': 0.406279} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.908720] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 609.908995] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-424b2537-e884-48af-aa3f-9b2d0c2a3dab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.919222] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 609.919222] env[62914]: value = "task-4831303" [ 609.919222] env[62914]: _type = "Task" [ 609.919222] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.935554] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 38%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.939275] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831303, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.945703] env[62914]: DEBUG nova.network.neutron [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Updating instance_info_cache with network_info: [{"id": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "address": "fa:16:3e:48:b6:8a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ce4bc2-c0", "ovs_interfaceid": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.972169] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831301, 'name': PowerOffVM_Task, 'duration_secs': 0.446353} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.972499] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 609.972679] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 609.972941] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edbc6190-9541-444c-ba35-5f5bc9c8b84d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.049758] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 610.052443] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 610.052443] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Deleting the datastore file [datastore2] db31a794-3928-41bb-afd8-14fae9357654 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 610.052443] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c08414c-12ed-4cdf-bc50-1faff1f431b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.061320] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for the task: (returnval){ [ 610.061320] env[62914]: value = "task-4831305" [ 610.061320] env[62914]: _type = "Task" [ 610.061320] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.082697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.082697] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 610.083550] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.083731] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.916s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.084175] env[62914]: DEBUG nova.objects.instance [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lazy-loading 'resources' on Instance uuid 52097338-887e-4c79-8413-abfd7ea26c96 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 610.224998] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.273895] env[62914]: DEBUG nova.network.neutron [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updated VIF entry in instance network info cache for port 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 610.274317] env[62914]: DEBUG nova.network.neutron [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updating instance_info_cache with network_info: [{"id": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "address": "fa:16:3e:ec:dc:0a", "network": {"id": "ba7c830d-c4a1-418f-a747-d997a634b13f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1412355570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33e55050ffe94a588a5db112563b5555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e7a9bbc-61", "ovs_interfaceid": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.280639] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 610.383073] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.439626] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.444763] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831303, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.448654] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Releasing lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.449145] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Instance network_info: |[{"id": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "address": "fa:16:3e:48:b6:8a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ce4bc2-c0", "ovs_interfaceid": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 610.449700] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:b6:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.462790] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Creating folder: Project (3705e459bbc244e2b1056a8219071ff5). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 610.463344] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab75c666-ac53-44fb-a4f3-155962eeb65e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.479072] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Created folder: Project (3705e459bbc244e2b1056a8219071ff5) in parent group-v941773. [ 610.479072] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Creating folder: Instances. Parent ref: group-v941815. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 610.479386] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb664f70-ff7e-46ae-85de-e987a0fc12c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.496066] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Created folder: Instances in parent group-v941815. [ 610.496493] env[62914]: DEBUG oslo.service.loopingcall [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.496815] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 610.497463] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee5b5f43-f7c1-4909-9290-c2a6163b48e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.532711] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.532711] env[62914]: value = "task-4831308" [ 610.532711] env[62914]: _type = "Task" [ 610.532711] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.544809] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831308, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.578472] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.593807] env[62914]: DEBUG nova.compute.utils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.598764] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 610.598764] env[62914]: DEBUG nova.network.neutron [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 610.724539] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831302, 'name': ReconfigVM_Task, 'duration_secs': 0.682612} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.728834] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb/2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 610.729815] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63f68f9f-5d5a-4c6b-b15a-871c1ba7dd43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.747024] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 610.747024] env[62914]: value = "task-4831309" [ 610.747024] env[62914]: _type = "Task" [ 610.747024] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.761910] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.762324] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.762493] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831309, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.771201] env[62914]: DEBUG nova.policy [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5da18e2dc49746d8a7125efdc106d62b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd271710592bf47b79e16552221fe7107', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 610.780710] env[62914]: DEBUG oslo_concurrency.lockutils [req-848348a1-4daa-4f74-a734-fb4dc6ab4453 req-56eb7824-c345-487f-94d3-13398a5acd32 service nova] Releasing lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.804539] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.881958] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.937986] env[62914]: DEBUG oslo_vmware.api [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831303, 'name': PowerOnVM_Task, 'duration_secs': 0.994946} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.946103] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 610.946420] env[62914]: INFO nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Took 17.89 seconds to spawn the instance on the hypervisor. [ 610.946620] env[62914]: DEBUG nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 610.946970] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 62%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.948175] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6083b60-375e-4fc9-b629-82ea54ee6cae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.050827] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831308, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.055626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aecccdb-0aa8-42f7-ba9d-3476fd1f883b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.074460] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7e74ad-1175-4653-b36e-06994a50ae09 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.087216] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.121691] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 611.126543] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d4c439-103b-4c33-a7a3-6bdd57f3af8c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.139923] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56d39d5-e6e9-4ab7-aca2-edb038583e9d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.162900] env[62914]: DEBUG nova.compute.provider_tree [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.262486] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831309, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.381316] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 68%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.436820] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 75%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.476844] env[62914]: INFO nova.compute.manager [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Took 23.54 seconds to build instance. [ 611.551638] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831308, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.578354] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.667634] env[62914]: DEBUG nova.scheduler.client.report [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 611.761693] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831309, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.884090] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.940498] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.983555] env[62914]: DEBUG oslo_concurrency.lockutils [None req-216313be-6f1a-4231-b1f5-6119fbf2b400 tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.061s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.055656] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831308, 'name': CreateVM_Task, 'duration_secs': 1.440787} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.056300] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 612.057419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.057419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.058442] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 612.059771] env[62914]: DEBUG nova.network.neutron [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Successfully created port: 5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.062173] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5949c260-8e86-413f-9382-221c4c9da988 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.073829] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 612.073829] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e2099f-89f9-b5c1-72ed-f22a51aadc61" [ 612.073829] env[62914]: _type = "Task" [ 612.073829] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.083098] env[62914]: DEBUG oslo_vmware.api [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Task: {'id': task-4831305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.973217} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.087024] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 612.087296] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 612.087486] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 612.088193] env[62914]: INFO nova.compute.manager [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] [instance: db31a794-3928-41bb-afd8-14fae9357654] Took 2.65 seconds to destroy the instance on the hypervisor. [ 612.088193] env[62914]: DEBUG oslo.service.loopingcall [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.088577] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e2099f-89f9-b5c1-72ed-f22a51aadc61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.088713] env[62914]: DEBUG nova.compute.manager [-] [instance: db31a794-3928-41bb-afd8-14fae9357654] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 612.088713] env[62914]: DEBUG nova.network.neutron [-] [instance: db31a794-3928-41bb-afd8-14fae9357654] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 612.141031] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 612.176402] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.176775] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.177173] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.181780] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.181780] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.181780] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.181780] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.181780] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.181780] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.181998] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.181998] env[62914]: DEBUG nova.virt.hardware [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.181998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.183236] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e81d4f1-c35f-4966-9cd7-72cd75d24354 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.191068] env[62914]: DEBUG nova.network.neutron [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Successfully updated port: b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 612.192916] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.343s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.195241] env[62914]: INFO nova.compute.claims [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.209471] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e03032-dd12-48c7-8081-8202627508b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.247700] env[62914]: INFO nova.scheduler.client.report [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Deleted allocations for instance 52097338-887e-4c79-8413-abfd7ea26c96 [ 612.266928] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831309, 'name': Rename_Task, 'duration_secs': 1.29675} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.267983] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 612.267983] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2683e45-88f9-4b36-a548-953ce6df66da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.276860] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 612.276860] env[62914]: value = "task-4831310" [ 612.276860] env[62914]: _type = "Task" [ 612.276860] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.293360] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.382653] env[62914]: DEBUG oslo_vmware.api [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831297, 'name': PowerOnVM_Task, 'duration_secs': 3.475095} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.382996] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 612.383177] env[62914]: INFO nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Took 21.74 seconds to spawn the instance on the hypervisor. 
[ 612.383436] env[62914]: DEBUG nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 612.384513] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7174a98d-f73c-404e-93a1-e72718574303 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.435320] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 97%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.469718] env[62914]: DEBUG nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Received event network-vif-deleted-a977117b-c407-4071-a0e5-5a31734d1025 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 612.469718] env[62914]: DEBUG nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Received event network-vif-deleted-90a32b17-6fbf-4efa-99d6-610bd414847b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 612.470352] env[62914]: DEBUG nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Received event network-vif-plugged-b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 612.470445] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] Acquiring lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.471025] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.471025] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.471025] env[62914]: DEBUG nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] No waiting events found dispatching 
network-vif-plugged-b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 612.471154] env[62914]: WARNING nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Received unexpected event network-vif-plugged-b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b for instance with vm_state building and task_state spawning. [ 612.471283] env[62914]: DEBUG nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Received event network-changed-b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 612.471440] env[62914]: DEBUG nova.compute.manager [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Refreshing instance network info cache due to event network-changed-b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 612.471629] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] Acquiring lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.471761] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] Acquired lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.471913] env[62914]: DEBUG nova.network.neutron [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Refreshing network info cache for port b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 612.486910] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 612.591032] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e2099f-89f9-b5c1-72ed-f22a51aadc61, 'name': SearchDatastore_Task, 'duration_secs': 0.040599} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.591148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.591948] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 612.591948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.591948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.592174] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 612.592485] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb50670d-8496-4b46-a0c2-522ad598b017 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.606595] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 612.606857] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 612.607759] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d84e0c-2ff2-4f87-989a-3b9b125d2a71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.617184] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 612.617184] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ca2553-ab47-cfd1-6a47-5c0163583aa9" [ 612.617184] env[62914]: _type = "Task" [ 612.617184] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.627697] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ca2553-ab47-cfd1-6a47-5c0163583aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.700238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.700388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.700541] env[62914]: DEBUG nova.network.neutron [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 612.767079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9dd46ab1-2a15-4140-bb55-eebf4e50d4a5 tempest-DeleteServersAdminTestJSON-1168096843 tempest-DeleteServersAdminTestJSON-1168096843-project-admin] Lock "52097338-887e-4c79-8413-abfd7ea26c96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.293s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.799482] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.912217] env[62914]: INFO nova.compute.manager [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Took 27.30 seconds to build instance. [ 612.935541] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 97%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.028419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.133771] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ca2553-ab47-cfd1-6a47-5c0163583aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.044359} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.137606] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da59dd7d-5d6c-4163-9f53-12e89644a3bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.144704] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 613.144704] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525f1349-5002-cf68-bc1d-878e4f603c4f" [ 613.144704] env[62914]: _type = "Task" [ 613.144704] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.155541] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525f1349-5002-cf68-bc1d-878e4f603c4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.305252] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.309152] env[62914]: DEBUG nova.network.neutron [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.415756] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9abe73e5-c7f3-43b0-a8b0-7fc6a004fda5 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.847s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.439756] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task} progress is 98%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.492541] env[62914]: DEBUG nova.network.neutron [-] [instance: db31a794-3928-41bb-afd8-14fae9357654] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.582593] env[62914]: DEBUG nova.compute.manager [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Received event network-changed-fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 613.588139] env[62914]: DEBUG nova.compute.manager [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Refreshing instance network info cache due to event network-changed-fc7b353a-564b-4bbe-b0e1-85f5f54f7092. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 613.588139] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] Acquiring lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.588139] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] Acquired lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.588139] env[62914]: DEBUG nova.network.neutron [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Refreshing network info cache for port fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 613.626964] env[62914]: DEBUG nova.network.neutron [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Updated VIF entry in instance network info cache for port b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 613.627365] env[62914]: DEBUG nova.network.neutron [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Updating instance_info_cache with network_info: [{"id": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "address": "fa:16:3e:48:b6:8a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ce4bc2-c0", "ovs_interfaceid": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.660213] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525f1349-5002-cf68-bc1d-878e4f603c4f, 'name': SearchDatastore_Task, 'duration_secs': 0.020455} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.662061] env[62914]: DEBUG nova.network.neutron [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.662777] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.662875] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] ea214cc0-0f7a-4aee-9906-8d47e660c8f7/ea214cc0-0f7a-4aee-9906-8d47e660c8f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 613.663568] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a083652-b4c4-43cf-a266-79192f174939 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.673339] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 613.673339] env[62914]: value = "task-4831311" [ 613.673339] env[62914]: _type = "Task" [ 613.673339] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.674022] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b34522-4476-474e-880b-9401b162f6af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.686740] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831311, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.689633] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56d59da-b38a-4697-9669-2a476ae29d0d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.726903] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa8692d-db27-4524-8710-5aad3c8e479b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.736286] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384b025c-dc91-40bd-8e6a-afe8f2ef8843 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.743673] env[62914]: DEBUG nova.compute.manager [req-b4ca08da-524b-4895-9096-179972ce1a5b req-d080d803-e29f-44cd-8f43-b464adc1c759 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-vif-plugged-b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 613.743911] env[62914]: DEBUG oslo_concurrency.lockutils [req-b4ca08da-524b-4895-9096-179972ce1a5b req-d080d803-e29f-44cd-8f43-b464adc1c759 service nova] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.744066] env[62914]: DEBUG oslo_concurrency.lockutils [req-b4ca08da-524b-4895-9096-179972ce1a5b req-d080d803-e29f-44cd-8f43-b464adc1c759 service nova] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.744203] env[62914]: DEBUG oslo_concurrency.lockutils [req-b4ca08da-524b-4895-9096-179972ce1a5b req-d080d803-e29f-44cd-8f43-b464adc1c759 service nova] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.744369] env[62914]: DEBUG nova.compute.manager [req-b4ca08da-524b-4895-9096-179972ce1a5b req-d080d803-e29f-44cd-8f43-b464adc1c759 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] No waiting events found dispatching network-vif-plugged-b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 613.744716] env[62914]: WARNING 
nova.compute.manager [req-b4ca08da-524b-4895-9096-179972ce1a5b req-d080d803-e29f-44cd-8f43-b464adc1c759 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received unexpected event network-vif-plugged-b92603ac-8bea-4f9a-aa50-8c942106916d for instance with vm_state building and task_state spawning. [ 613.759423] env[62914]: DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 613.792688] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.922838] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 613.938128] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831298, 'name': RelocateVM_Task, 'duration_secs': 4.739821} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.938580] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 613.939656] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941785', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'name': 'volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e69c36e9-3c59-48e3-9962-ffe8de10a789', 'attached_at': '', 'detached_at': '', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'serial': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 613.939819] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e77f20-4bb6-4aac-bd21-00ee7e2c9aa9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.956795] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9da949-2ff5-48af-ac0a-c1e676ef9f6e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.980722] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd/volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.981810] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a72a87ed-6fa8-4a22-9326-d4378c6ece31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.997179] env[62914]: INFO nova.compute.manager [-] [instance: db31a794-3928-41bb-afd8-14fae9357654] Took 1.91 seconds to deallocate network for instance. [ 614.005764] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 614.005764] env[62914]: value = "task-4831312" [ 614.005764] env[62914]: _type = "Task" [ 614.005764] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.015899] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831312, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.137222] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8c1d339-41b6-47a6-903a-662f9eff748f req-1dbeb2c1-ae9f-4670-b192-b1237371d7ea service nova] Releasing lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.165032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.165582] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Instance network_info: |[{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 614.166102] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:69:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b92603ac-8bea-4f9a-aa50-8c942106916d', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.177112] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Creating folder: Project (2562164f04b045a59b3b501d2b0014ec). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.177112] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbb4a6dd-c9e0-4b08-b02c-73331e3fd859 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.196119] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831311, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.291971] env[62914]: ERROR nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [req-98b2bf4d-34f8-49df-8513-68a673a9d591] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f2f7a014-852b-4b37-9610-c5761f4b0175. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-98b2bf4d-34f8-49df-8513-68a673a9d591"}]} [ 614.308905] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.311529] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Created folder: Project (2562164f04b045a59b3b501d2b0014ec) in parent group-v941773. [ 614.311743] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Creating folder: Instances. Parent ref: group-v941818. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.312144] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-269bf970-28c3-4eef-aa17-7e0bf5c30662 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.328241] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Created folder: Instances in parent group-v941818. [ 614.329111] env[62914]: DEBUG oslo.service.loopingcall [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.330883] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 614.334989] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 614.336154] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d10253c5-e485-4780-a527-7cb9b0fd437f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.358227] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 614.358501] env[62914]: DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 614.367761] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.367761] env[62914]: value = "task-4831315" [ 614.367761] env[62914]: _type = "Task" [ 614.367761] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.384972] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831315, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.386515] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 614.421318] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 614.456068] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.507717] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.521632] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831312, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.558981] env[62914]: DEBUG nova.network.neutron [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updated VIF entry in instance network info cache for port fc7b353a-564b-4bbe-b0e1-85f5f54f7092. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 614.559690] env[62914]: DEBUG nova.network.neutron [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updating instance_info_cache with network_info: [{"id": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "address": "fa:16:3e:2f:34:dc", "network": {"id": "e8c1428c-2477-4bff-b70c-eb461db96ea5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-779065668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a83acb637b5c47f395d677ee48e37dae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7b353a-56", "ovs_interfaceid": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.564612] env[62914]: DEBUG nova.network.neutron [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Successfully updated port: 5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.704227] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831311, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.736877] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.737482] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.004s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.738600] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "61e36e7b-aaa1-420e-bd43-f0184b56581b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.738600] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.738600] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.754604] env[62914]: INFO nova.compute.manager [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Terminating instance [ 614.761156] env[62914]: DEBUG nova.compute.manager [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 614.761472] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 614.762864] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216cefeb-b096-4bbc-9c43-f27329c41978 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.784097] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 614.788184] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d38ada84-7df0-4485-850d-75206509c475 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.801992] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.803422] env[62914]: DEBUG oslo_vmware.api [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 614.803422] env[62914]: value = "task-4831316" [ 614.803422] env[62914]: _type = "Task" [ 614.803422] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.818821] env[62914]: DEBUG oslo_vmware.api [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.884811] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831315, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.902044] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec07ada-1aa7-46e2-8b11-08180cc86899 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.915994] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aac556b-ec28-4fe7-a7db-159dd0342b43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.964068] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515b23e8-3497-465a-add7-78fa274a5818 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.974650] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a0aebb-b9a3-4b35-ae2a-bb6c4ceed2be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.991989] env[62914]: DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 615.022628] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831312, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.072408] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "refresh_cache-6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.072608] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "refresh_cache-6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.072871] env[62914]: DEBUG nova.network.neutron [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 615.074948] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d487f6c-3472-4d94-be94-8465edaa066b req-e9e655f6-867b-490f-9069-6bc7f96809b5 service nova] Releasing lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.190782] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831311, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.426518} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.191194] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] ea214cc0-0f7a-4aee-9906-8d47e660c8f7/ea214cc0-0f7a-4aee-9906-8d47e660c8f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 615.191310] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.191576] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fe2bfd6-cf12-471f-b970-8c7567fa9949 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.201113] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 615.201113] env[62914]: value = "task-4831317" [ 615.201113] env[62914]: _type = "Task" [ 615.201113] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.211110] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831317, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.261153] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "69a9cd15-7d6f-464d-b451-e193179088f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.261482] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "69a9cd15-7d6f-464d-b451-e193179088f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.280510] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9f08c-d563-2041-6f11-7c8fe76594ac/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 615.280913] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec8f641-6b82-4dda-95ad-dc3863498a8f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.292760] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9f08c-d563-2041-6f11-7c8fe76594ac/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 615.292939] env[62914]: ERROR oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9f08c-d563-2041-6f11-7c8fe76594ac/disk-0.vmdk due to incomplete transfer. [ 615.297266] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-10211b37-8f63-4d20-a83b-49688eae0833 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.299709] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task} progress is 82%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.310035] env[62914]: DEBUG oslo_vmware.rw_handles [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9f08c-d563-2041-6f11-7c8fe76594ac/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 615.310291] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Uploaded image 84649d91-ae10-4d24-9e47-5d49bd26b4b1 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 615.312523] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 615.313382] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-76995a84-4292-47bc-9402-0eb2a26ced7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.320028] env[62914]: DEBUG oslo_vmware.api [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831316, 'name': PowerOffVM_Task, 'duration_secs': 0.295584} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.321829] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 615.322632] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 615.322632] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 615.322632] env[62914]: value = "task-4831318" [ 615.322632] env[62914]: _type = "Task" [ 615.322632] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.322632] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c0a9ae9-0708-4857-95fe-4306a6609d6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.336977] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831318, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.384820] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831315, 'name': CreateVM_Task, 'duration_secs': 0.572601} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.385104] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 615.385988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.386764] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.387241] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 615.387627] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5bbf2eb-2f16-423b-afa2-6f495108d9db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.394858] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 615.394858] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a86f21-afdb-aea7-b364-ebe13fcc7eb8" [ 615.394858] env[62914]: _type = "Task" [ 615.394858] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.409367] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a86f21-afdb-aea7-b364-ebe13fcc7eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.441060] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 615.441060] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 615.441392] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Deleting the datastore file [datastore2] 61e36e7b-aaa1-420e-bd43-f0184b56581b {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 615.441847] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47241055-dc08-41eb-9ec8-2bc58d2d1833 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.451280] env[62914]: DEBUG oslo_vmware.api [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for the task: (returnval){ [ 615.451280] env[62914]: value = "task-4831320" [ 615.451280] env[62914]: _type = "Task" [ 615.451280] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.462870] env[62914]: DEBUG oslo_vmware.api [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831320, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.521605] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831312, 'name': ReconfigVM_Task, 'duration_secs': 1.404751} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.521707] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Reconfigured VM instance instance-0000000c to attach disk [datastore1] volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd/volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 615.528750] env[62914]: ERROR nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [req-a40a315b-f87c-4bbb-aac6-c3b0b152caf9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f2f7a014-852b-4b37-9610-c5761f4b0175. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a40a315b-f87c-4bbb-aac6-c3b0b152caf9"}]} [ 615.530541] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62b27c92-094c-4751-b3db-849c38f52ee8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.550297] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 615.550297] env[62914]: value = "task-4831321" [ 615.550297] env[62914]: _type = "Task" [ 615.550297] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.563524] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831321, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.565586] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 615.598771] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 615.599568] env[62914]: DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 615.614971] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 615.640152] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 615.669552] env[62914]: DEBUG nova.network.neutron [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.719729] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831317, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196425} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.719729] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.720229] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d4fc06-3e17-4dcc-82cd-e2ab65272557 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.754969] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] ea214cc0-0f7a-4aee-9906-8d47e660c8f7/ea214cc0-0f7a-4aee-9906-8d47e660c8f7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 615.754969] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7391e6ed-a0f4-4d2f-9f76-59e61dca2699 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.781792] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 615.781792] env[62914]: value = "task-4831322" [ 615.781792] env[62914]: _type = "Task" [ 615.781792] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.804872] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831322, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.816896] env[62914]: DEBUG oslo_vmware.api [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831310, 'name': PowerOnVM_Task, 'duration_secs': 3.343114} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.817056] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 615.817322] env[62914]: INFO nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Took 16.47 seconds to spawn the instance on the hypervisor. [ 615.817478] env[62914]: DEBUG nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 615.818432] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9ab93c-cc25-460b-9218-88edaeb5429b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.844631] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831318, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.913028] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a86f21-afdb-aea7-b364-ebe13fcc7eb8, 'name': SearchDatastore_Task, 'duration_secs': 0.015878} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.913324] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.914523] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.914523] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.914523] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.914523] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.918249] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57825479-7a30-4edd-ba8d-209f400af2f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.932596] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.932794] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 615.936699] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e183f8d3-226d-453a-b7db-193dc84a274f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.947381] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 615.947381] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e9529d-e8fc-41b1-5f72-f5ab6f17efd5" [ 615.947381] env[62914]: _type = "Task" [ 615.947381] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.959721] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e9529d-e8fc-41b1-5f72-f5ab6f17efd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.966928] env[62914]: DEBUG oslo_vmware.api [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Task: {'id': task-4831320, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271285} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.966928] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 615.966928] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 615.966928] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 615.967387] env[62914]: INFO nova.compute.manager [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Took 1.21 seconds to destroy the instance on the hypervisor. [ 615.967387] env[62914]: DEBUG oslo.service.loopingcall [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.967670] env[62914]: DEBUG nova.compute.manager [-] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 615.969794] env[62914]: DEBUG nova.network.neutron [-] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 616.065690] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831321, 'name': ReconfigVM_Task, 'duration_secs': 0.30984} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.069533] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941785', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'name': 'volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e69c36e9-3c59-48e3-9962-ffe8de10a789', 'attached_at': '', 'detached_at': '', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'serial': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 616.070307] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f9de446-1d82-4cc3-b677-ed573940b7fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.081399] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 616.081399] env[62914]: value = "task-4831323" [ 616.081399] env[62914]: _type = "Task" [ 616.081399] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.091800] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831323, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.118562] env[62914]: DEBUG nova.network.neutron [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Updating instance_info_cache with network_info: [{"id": "5dee04a2-563a-4fb4-8651-bec18ae531ea", "address": "fa:16:3e:a5:0d:df", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dee04a2-56", "ovs_interfaceid": "5dee04a2-563a-4fb4-8651-bec18ae531ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.160763] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e0107f-fdd4-4ce8-93e0-b355cc6f36cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.170656] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b34294-ec9f-4421-b47e-568b7b541653 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.204115] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763f09b2-7e54-4a1b-90ce-6747f9184512 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.213055] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cac7ef0-98aa-4e5f-b54c-c386570a8289 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.229870] env[62914]: DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 616.296156] env[62914]: DEBUG oslo_vmware.api [None 
req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831322, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.343328] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831318, 'name': Destroy_Task, 'duration_secs': 0.745389} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.343722] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Destroyed the VM [ 616.344115] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 616.348564] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-60ebec18-c435-4b5f-9913-cfe2efa3cb01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.348564] env[62914]: INFO nova.compute.manager [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Took 21.47 seconds to build instance. [ 616.356464] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 616.356464] env[62914]: value = "task-4831324" [ 616.356464] env[62914]: _type = "Task" [ 616.356464] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.375056] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831324, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.468125] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e9529d-e8fc-41b1-5f72-f5ab6f17efd5, 'name': SearchDatastore_Task, 'duration_secs': 0.01648} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.471172] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2cefe90-202b-4f5c-a231-359d073d537c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.480661] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 616.480661] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c4cc55-6a49-e806-7ff2-e65bf1dcec23" [ 616.480661] env[62914]: _type = "Task" [ 616.480661] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.491559] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c4cc55-6a49-e806-7ff2-e65bf1dcec23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.595400] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831323, 'name': Rename_Task, 'duration_secs': 0.349304} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.596089] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 616.596701] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9798dfc-9e90-4802-9053-d173aab52d66 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.605331] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 616.605331] env[62914]: value = "task-4831325" [ 616.605331] env[62914]: _type = "Task" [ 616.605331] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.616184] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.622103] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "refresh_cache-6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.622457] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Instance network_info: |[{"id": "5dee04a2-563a-4fb4-8651-bec18ae531ea", "address": "fa:16:3e:a5:0d:df", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dee04a2-56", "ovs_interfaceid": "5dee04a2-563a-4fb4-8651-bec18ae531ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 616.622913] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:0d:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dee04a2-563a-4fb4-8651-bec18ae531ea', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.631183] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating folder: Project (d271710592bf47b79e16552221fe7107). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 616.631536] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a35a7d06-e7db-4b49-919b-4d568a1278ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.642881] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created folder: Project (d271710592bf47b79e16552221fe7107) in parent group-v941773. 
[ 616.643113] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating folder: Instances. Parent ref: group-v941821. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 616.643423] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc2bd588-5ce0-43d1-93ad-7dff7065439b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.657174] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created folder: Instances in parent group-v941821. [ 616.657463] env[62914]: DEBUG oslo.service.loopingcall [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 616.657681] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 616.657901] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28d601c1-8661-4ebc-b15a-55a771331e81 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.680875] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.680875] env[62914]: value = "task-4831328" [ 616.680875] env[62914]: _type = "Task" [ 616.680875] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.692804] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831328, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.768971] env[62914]: DEBUG nova.compute.manager [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-changed-b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 616.768971] env[62914]: DEBUG nova.compute.manager [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Refreshing instance network info cache due to event network-changed-b92603ac-8bea-4f9a-aa50-8c942106916d. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 616.770667] env[62914]: DEBUG oslo_concurrency.lockutils [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] Acquiring lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.770667] env[62914]: DEBUG oslo_concurrency.lockutils [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] Acquired lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.770667] env[62914]: DEBUG nova.network.neutron [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Refreshing network info cache for port b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 616.799067] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831322, 'name': ReconfigVM_Task, 'duration_secs': 1.012871} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.800464] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Reconfigured VM instance instance-0000000d to attach disk [datastore1] ea214cc0-0f7a-4aee-9906-8d47e660c8f7/ea214cc0-0f7a-4aee-9906-8d47e660c8f7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 616.800940] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0c9beca-eba5-430e-b8f9-723118746cb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.809150] env[62914]: DEBUG nova.scheduler.client.report [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updated inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with generation 28 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 616.809150] env[62914]: DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 28 to 29 during operation: update_inventory {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 616.809150] env[62914]: 
DEBUG nova.compute.provider_tree [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 616.815264] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 616.815264] env[62914]: value = "task-4831329" [ 616.815264] env[62914]: _type = "Task" [ 616.815264] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.825921] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831329, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.854691] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32f38336-f19f-4df1-9336-8758539d4be9 tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.981s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.875573] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831324, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.995186] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c4cc55-6a49-e806-7ff2-e65bf1dcec23, 'name': SearchDatastore_Task, 'duration_secs': 0.017384} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.995562] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.995918] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 6fd5f3b8-1175-4bd5-b0b4-12517ba65271/6fd5f3b8-1175-4bd5-b0b4-12517ba65271.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 616.997034] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd39d92e-0b4d-480b-a044-9950a5e877dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.006127] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 617.006127] env[62914]: value = "task-4831330" [ 617.006127] env[62914]: _type = "Task" [ 617.006127] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.020746] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831330, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.108876] env[62914]: DEBUG nova.compute.manager [req-779a0a37-9cdb-4968-82fe-1e710c49656c req-88c324ae-6662-47a2-b8f5-a6e30e134709 service nova] [instance: db31a794-3928-41bb-afd8-14fae9357654] Received event network-vif-deleted-3b909a06-3f47-4b08-8330-c3ac1c957a35 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 617.119149] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.190987] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831328, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.317190] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.123s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.317190] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 617.322631] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.661s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.325219] env[62914]: INFO nova.compute.claims [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.347464] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831329, 'name': Rename_Task, 'duration_secs': 0.428242} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.347464] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 617.347675] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa302dd1-4f6e-4fa2-9188-b8608b6afacd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.360215] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 617.365635] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 617.365635] env[62914]: value = "task-4831331" [ 617.365635] env[62914]: _type = "Task" [ 617.365635] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.385804] env[62914]: DEBUG oslo_vmware.api [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831324, 'name': RemoveSnapshot_Task, 'duration_secs': 0.98394} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.389471] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 617.390404] env[62914]: INFO nova.compute.manager [None req-813d6923-3374-45b6-920c-91f4e2eb191f tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Took 17.59 seconds to snapshot the instance on the hypervisor. [ 617.392207] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.528022] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831330, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.619605] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.694044] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831328, 'name': CreateVM_Task, 'duration_secs': 0.530014} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.694044] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 617.694768] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.694946] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.695383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 617.695552] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c237353b-a5ef-40b3-8d4f-f11782bc2779 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.706119] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 617.706119] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fd0073-91ad-6ecd-47dc-2cc8abf17ad3" [ 617.706119] env[62914]: _type = "Task" [ 617.706119] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.712509] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fd0073-91ad-6ecd-47dc-2cc8abf17ad3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.724300] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.724587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.724819] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.725042] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.725220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.729451] env[62914]: INFO nova.compute.manager [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Terminating instance [ 617.734094] env[62914]: DEBUG nova.compute.manager [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 617.734208] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 617.735076] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76dfa47d-a5cb-4aba-a8c2-34e14fdc7701 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.743535] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 617.744101] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b42c3a41-c62e-42fb-8c58-b39f52c1c911 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.751822] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 617.751822] env[62914]: value = "task-4831332" [ 617.751822] env[62914]: _type = "Task" [ 617.751822] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.762643] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.840287] env[62914]: DEBUG nova.compute.utils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.850893] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 617.851555] env[62914]: DEBUG nova.network.neutron [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 617.887144] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.902819] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.002558] env[62914]: DEBUG nova.policy [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a426c3bd7944e69bacce135b47629f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e460060822e4eda931ae402635e9eb6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 618.023432] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831330, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612476} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.024900] env[62914]: DEBUG nova.network.neutron [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updated VIF entry in instance network info cache for port b92603ac-8bea-4f9a-aa50-8c942106916d. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 618.025149] env[62914]: DEBUG nova.network.neutron [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.026561] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 6fd5f3b8-1175-4bd5-b0b4-12517ba65271/6fd5f3b8-1175-4bd5-b0b4-12517ba65271.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 618.028229] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 618.028229] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49f68924-bb42-4fba-a3da-6c4c3593ed2e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.037202] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 618.037202] env[62914]: value = "task-4831333" [ 618.037202] env[62914]: _type = "Task" [ 618.037202] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.049848] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831333, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.084377] env[62914]: DEBUG nova.network.neutron [-] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.118533] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.217753] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fd0073-91ad-6ecd-47dc-2cc8abf17ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.046798} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.218123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.218438] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.218703] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.218857] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.219099] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.219460] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7298c8ce-1e0b-43b9-aced-9b224169adcf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.237353] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 
tempest-ImagesTestJSON-652168268-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.237815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 618.239879] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b6132b1-d287-4dc7-9b60-7169299cb39b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.251120] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 618.251120] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52123e00-a210-ec05-c8b9-38790f498dff" [ 618.251120] env[62914]: _type = "Task" [ 618.251120] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.266017] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52123e00-a210-ec05-c8b9-38790f498dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.268519] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.354065] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 618.393751] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.529206] env[62914]: DEBUG oslo_concurrency.lockutils [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] Releasing lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.529206] env[62914]: DEBUG nova.compute.manager [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Received event network-vif-plugged-5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 618.529989] env[62914]: DEBUG oslo_concurrency.lockutils [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] Acquiring lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.529989] env[62914]: DEBUG oslo_concurrency.lockutils [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.530141] env[62914]: DEBUG oslo_concurrency.lockutils [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.530260] env[62914]: DEBUG nova.compute.manager [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] No waiting events found dispatching network-vif-plugged-5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 618.530511] env[62914]: WARNING nova.compute.manager [req-7be59486-adda-471e-82ef-fcd7de14b797 req-3b9b8ff8-2060-47bb-833f-308d367101d8 service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Received unexpected event network-vif-plugged-5dee04a2-563a-4fb4-8651-bec18ae531ea for instance with vm_state building and task_state spawning. [ 618.556942] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831333, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143977} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.560095] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 618.561716] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9427f75b-dfd2-46a0-bf64-121de9f028b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.587839] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 6fd5f3b8-1175-4bd5-b0b4-12517ba65271/6fd5f3b8-1175-4bd5-b0b4-12517ba65271.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 618.593186] env[62914]: INFO nova.compute.manager [-] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Took 2.62 seconds to deallocate network for instance. [ 618.594087] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74582780-32de-4951-9fa4-50559ad58acf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.628051] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 618.628051] env[62914]: value = "task-4831334" [ 618.628051] env[62914]: _type = "Task" [ 618.628051] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.635822] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.653470] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831334, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.659492] env[62914]: DEBUG nova.network.neutron [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Successfully created port: 99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.777305] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52123e00-a210-ec05-c8b9-38790f498dff, 'name': SearchDatastore_Task, 'duration_secs': 0.027531} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.782788] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa05d0e9-530e-4aa6-bf0b-f339b204fd2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.792068] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831332, 'name': PowerOffVM_Task, 'duration_secs': 0.818063} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.792248] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 618.792412] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 618.793125] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05d7c55b-c2f4-44a1-a5fd-12954a8a9de7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.800176] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 618.800176] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b20c8-abc4-b1b2-d230-284c205cce88" [ 618.800176] env[62914]: _type = "Task" [ 618.800176] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.808837] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b20c8-abc4-b1b2-d230-284c205cce88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.888163] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.889466] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4252aef2-7ab8-4d4a-b935-f6b285328a6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.900527] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbc595b-8997-483c-8494-2be6c1092215 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.934789] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a91d973-fd17-405e-a7be-c640c45ea428 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.946059] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf5dfa1-7c4c-4b69-8b39-3e4d1f1e8030 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.961963] env[62914]: DEBUG nova.compute.provider_tree [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.088275] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 619.088686] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 619.089620] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleting the datastore file [datastore1] 9e39cfb8-e277-4798-92b0-b54f310ef2f4 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.091913] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7c096f6-ad8d-49de-b9d7-63666f8169cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.101903] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for the task: (returnval){ [ 
619.101903] env[62914]: value = "task-4831336" [ 619.101903] env[62914]: _type = "Task" [ 619.101903] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.110960] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: {'id': task-4831336, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.125992] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.127949] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.143988] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.315125] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b20c8-abc4-b1b2-d230-284c205cce88, 'name': SearchDatastore_Task, 'duration_secs': 0.027389} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.315577] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.316138] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1/6a9c973f-8aea-4403-9fa2-d37e5eec1ee1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 619.316787] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-674b3651-01db-4839-9c26-92a94a2d0e55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.337615] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 619.337615] env[62914]: value = "task-4831337" [ 619.337615] env[62914]: _type = "Task" [ 619.337615] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.361532] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831337, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.373083] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 619.388906] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.427388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.427388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.440307] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 619.440307] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 619.440307] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.440591] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 619.440591] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.443421] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 
tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 619.443421] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 619.443421] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 619.443421] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 619.443421] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 619.444921] env[62914]: DEBUG nova.virt.hardware [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 619.444921] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fcc1d8-c7d9-46c9-9ec8-52473b0c9262 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.457136] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a11249-049c-4101-957a-09894539c6ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.477617] env[62914]: DEBUG nova.scheduler.client.report [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.614128] env[62914]: DEBUG oslo_vmware.api [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Task: 
{'id': task-4831336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39587} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.614836] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 619.614836] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 619.615128] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 619.615341] env[62914]: INFO nova.compute.manager [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Took 1.88 seconds to destroy the instance on the hypervisor. [ 619.615724] env[62914]: DEBUG oslo.service.loopingcall [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.616100] env[62914]: DEBUG nova.compute.manager [-] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 619.616258] env[62914]: DEBUG nova.network.neutron [-] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 619.629899] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.647816] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831334, 'name': ReconfigVM_Task, 'duration_secs': 0.65528} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.647816] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 6fd5f3b8-1175-4bd5-b0b4-12517ba65271/6fd5f3b8-1175-4bd5-b0b4-12517ba65271.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 619.648232] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddf432a4-cdea-4807-b5c4-9aab8a97165e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.660181] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 619.660181] env[62914]: value = "task-4831338" [ 619.660181] env[62914]: _type = "Task" [ 619.660181] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.677571] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831338, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.855919] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831337, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.892722] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.984205] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.985987] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 619.993389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.876s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.993389] env[62914]: DEBUG nova.objects.instance [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lazy-loading 'resources' on Instance uuid 43edad1f-cff0-4d3c-a721-98277d1cddc2 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 620.127943] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.171462] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831338, 'name': Rename_Task, 'duration_secs': 0.252975} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.171766] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 620.172058] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bdfd7c0-efe8-47d7-84a2-8c10513365ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.180878] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 620.180878] env[62914]: value = "task-4831339" [ 620.180878] env[62914]: _type = "Task" [ 620.180878] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.195627] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831339, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.355271] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831337, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657298} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.355271] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1/6a9c973f-8aea-4403-9fa2-d37e5eec1ee1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 620.355271] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.355271] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0d1da87-d4cd-4ee8-b214-2b2ff68caf50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.363338] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 620.363338] env[62914]: value = "task-4831340" [ 620.363338] env[62914]: _type = "Task" [ 620.363338] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.378579] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.391919] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.498886] env[62914]: DEBUG nova.compute.utils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.506312] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 620.506312] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 620.580168] env[62914]: DEBUG nova.network.neutron [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Successfully updated port: 99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 620.632711] env[62914]: DEBUG nova.policy [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '100272696b464561889b452f7c318a34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7da23e8d3c044f178c224a3e40a346a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 620.652217] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.697495] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831339, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.740137] env[62914]: DEBUG nova.network.neutron [-] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.874402] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.137715} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.874714] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.879155] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84b28a4-08d5-4f23-84b1-bab5d29438b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.909670] env[62914]: DEBUG oslo_vmware.api [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831331, 'name': PowerOnVM_Task, 'duration_secs': 3.458627} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.919951] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1/6a9c973f-8aea-4403-9fa2-d37e5eec1ee1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.924568] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 620.924568] env[62914]: INFO nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Took 16.62 seconds to spawn the instance on the hypervisor. [ 620.924568] env[62914]: DEBUG nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 620.924568] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe7c230f-fb69-4023-b50c-788169af7499 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.942404] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5052c7b6-f378-498f-9ec2-cb657e5848f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.954644] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 620.954644] env[62914]: value = "task-4831341" [ 620.954644] env[62914]: _type = "Task" [ 620.954644] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.966328] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831341, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.982476] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2fb00e-0929-47e9-944f-5e5ca2b0b7d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.994338] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93034b88-a536-4a50-9209-7e6ba473f881 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.030397] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 621.036122] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d98a80-c19e-4020-89f4-6b33a4227e56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.047075] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d2f7b2-4554-49c2-9178-5cb6fa4271a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.066581] env[62914]: DEBUG nova.compute.provider_tree [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.084911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "refresh_cache-9ce44ae9-9369-4c0c-9d14-9c8fde42d612" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.085169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired lock "refresh_cache-9ce44ae9-9369-4c0c-9d14-9c8fde42d612" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.085375] env[62914]: DEBUG nova.network.neutron [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 621.134952] env[62914]: DEBUG oslo_vmware.api [None 
req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task} progress is 68%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.198188] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831339, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.243758] env[62914]: INFO nova.compute.manager [-] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Took 1.63 seconds to deallocate network for instance. [ 621.252263] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Successfully created port: 668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.466440] env[62914]: INFO nova.compute.manager [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Took 23.85 seconds to build instance. [ 621.472941] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831341, 'name': ReconfigVM_Task, 'duration_secs': 0.474689} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.473631] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1/6a9c973f-8aea-4403-9fa2-d37e5eec1ee1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.474462] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fae890e-5501-4220-8427-db774d69421a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.483054] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 621.483054] env[62914]: value = "task-4831342" [ 621.483054] env[62914]: _type = "Task" [ 621.483054] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.495396] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831342, 'name': Rename_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.570769] env[62914]: DEBUG nova.scheduler.client.report [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.641458] env[62914]: DEBUG oslo_vmware.api [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831325, 'name': PowerOnVM_Task, 'duration_secs': 4.942068} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.642690] env[62914]: DEBUG nova.network.neutron [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.646345] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 621.646649] env[62914]: INFO nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Took 17.89 seconds to spawn the instance on the hypervisor. 
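The repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines throughout this trace come from oslo.vmware's task polling (wait_for_task at api.py:397, _poll_task at api.py:434/444): each vCenter call such as PowerOnVM_Task, ReconfigVM_Task or CopyVirtualDisk_Task returns a Task reference that Nova polls until it finishes. The following is a minimal illustrative sketch of that pattern only; FakeTask and wait_for_task below are hypothetical stand-ins and not the oslo.vmware API.

import time

# Illustration only: a simulated vCenter task and the polling loop that
# produces "progress is N%" / "completed successfully" log lines like the
# ones above. Names here are hypothetical stand-ins, not oslo.vmware code.

class FakeTask:
    """Stands in for a vCenter Task reference such as task-4831339."""

    def __init__(self, name):
        self.name = name
        self._progress = 0

    def info(self):
        # Advance the simulated task a little on every poll.
        self._progress = min(self._progress + 33, 100)
        state = "success" if self._progress >= 100 else "running"
        return {"state": state, "progress": self._progress}


def wait_for_task(task, poll_interval=0.1, log=print):
    """Poll a task until it reports success, logging progress along the way."""
    log("Waiting for the task: %s to complete." % task.name)
    while True:
        info = task.info()
        if info["state"] == "success":
            log("Task: {'name': %r} completed successfully." % task.name)
            return info
        log("Task: {'name': %r} progress is %d%%." % (task.name, info["progress"]))
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))

Running the sketch prints a short progress sequence ending in "completed successfully", mirroring the shape of the task-4831339 PowerOnVM_Task entries in this log (the real driver additionally handles error states and per-task duration_secs reporting).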
[ 621.646649] env[62914]: DEBUG nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 621.648162] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef7d8a0-ea5a-4142-9016-c5cfc941769f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.666453] env[62914]: DEBUG nova.compute.manager [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Received event network-changed-5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 621.666914] env[62914]: DEBUG nova.compute.manager [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Refreshing instance network info cache due to event network-changed-5dee04a2-563a-4fb4-8651-bec18ae531ea. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 621.667339] env[62914]: DEBUG oslo_concurrency.lockutils [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] Acquiring lock "refresh_cache-6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.667339] env[62914]: DEBUG oslo_concurrency.lockutils [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] Acquired lock "refresh_cache-6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.668126] env[62914]: DEBUG nova.network.neutron [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Refreshing network info cache for port 5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 621.704358] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831339, 'name': PowerOnVM_Task} progress is 91%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.755421] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.899357] env[62914]: DEBUG nova.network.neutron [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Updating instance_info_cache with network_info: [{"id": "99005c2d-b79b-4aba-b30d-613274dad233", "address": "fa:16:3e:35:72:ab", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99005c2d-b7", "ovs_interfaceid": "99005c2d-b79b-4aba-b30d-613274dad233", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.920025] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Successfully created port: a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.974605] env[62914]: DEBUG oslo_concurrency.lockutils [None req-58ac5507-06a3-4729-abb6-1d85e4a64a1f tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.370s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.999051] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831342, 'name': Rename_Task, 'duration_secs': 0.213077} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.000346] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 622.000640] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89e3e464-ae3f-4ce0-b966-fcd4f2357ea4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.008557] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 622.008557] env[62914]: value = "task-4831343" [ 622.008557] env[62914]: _type = "Task" [ 622.008557] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.017386] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.043049] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 622.074285] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 622.074585] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 622.074794] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.075072] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 622.075461] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.075461] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 622.075731] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 622.075731] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 622.075896] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 
tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 622.076089] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 622.076523] env[62914]: DEBUG nova.virt.hardware [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 622.077274] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5b6a17-4c3a-4a48-b90d-3ced7ba0bfa3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.080493] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.089s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.082790] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.096s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.090617] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab82994-6e59-44c7-9b80-ce1c27329a43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.111517] env[62914]: INFO nova.scheduler.client.report [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Deleted allocations for instance 43edad1f-cff0-4d3c-a721-98277d1cddc2 [ 622.190834] env[62914]: INFO nova.compute.manager [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Took 25.92 seconds to build instance. [ 622.205056] env[62914]: DEBUG oslo_vmware.api [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831339, 'name': PowerOnVM_Task, 'duration_secs': 1.57555} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.205391] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 622.205760] env[62914]: INFO nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Took 12.65 seconds to spawn the instance on the hypervisor. [ 622.205968] env[62914]: DEBUG nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 622.208154] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e195c4f6-d732-4b72-b1e6-dc7eec4a115b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.350266] env[62914]: DEBUG nova.compute.manager [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Received event network-vif-deleted-7a9ccc3c-c2df-44ff-af19-7ce5b3048eb0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 622.350532] env[62914]: DEBUG nova.compute.manager [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Received event network-changed-a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 622.350709] env[62914]: DEBUG nova.compute.manager [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Refreshing instance network info cache due to event network-changed-a37b3d57-45a7-4167-970b-4734a54661f8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 622.350952] env[62914]: DEBUG oslo_concurrency.lockutils [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] Acquiring lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.351114] env[62914]: DEBUG oslo_concurrency.lockutils [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] Acquired lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.351283] env[62914]: DEBUG nova.network.neutron [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Refreshing network info cache for port a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 622.402150] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Releasing lock "refresh_cache-9ce44ae9-9369-4c0c-9d14-9c8fde42d612" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.402475] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Instance network_info: |[{"id": "99005c2d-b79b-4aba-b30d-613274dad233", "address": "fa:16:3e:35:72:ab", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99005c2d-b7", "ovs_interfaceid": "99005c2d-b79b-4aba-b30d-613274dad233", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 622.402948] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:72:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'99005c2d-b79b-4aba-b30d-613274dad233', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.415634] env[62914]: DEBUG oslo.service.loopingcall [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.415890] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 622.416195] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2af30905-1e24-4aba-8d09-3ea274a2c299 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.441912] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.441912] env[62914]: value = "task-4831344" [ 622.441912] env[62914]: _type = "Task" [ 622.441912] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.452557] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831344, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.470373] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Successfully created port: a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.478425] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 622.526699] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831343, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.590066] env[62914]: INFO nova.compute.claims [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.619724] env[62914]: DEBUG nova.network.neutron [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Updated VIF entry in instance network info cache for port 5dee04a2-563a-4fb4-8651-bec18ae531ea. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 622.620030] env[62914]: DEBUG nova.network.neutron [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Updating instance_info_cache with network_info: [{"id": "5dee04a2-563a-4fb4-8651-bec18ae531ea", "address": "fa:16:3e:a5:0d:df", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dee04a2-56", "ovs_interfaceid": "5dee04a2-563a-4fb4-8651-bec18ae531ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.626695] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edcbf416-5890-4882-b827-401b91a888a6 tempest-ServerDiagnosticsNegativeTest-45432392 tempest-ServerDiagnosticsNegativeTest-45432392-project-member] Lock "43edad1f-cff0-4d3c-a721-98277d1cddc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.680s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.694155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b2ee77b0-1da0-47e8-a9e1-93e52a47789d tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.445s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.732468] env[62914]: INFO nova.compute.manager [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Took 22.49 seconds to build instance. [ 622.953641] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831344, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.015026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.024672] env[62914]: DEBUG oslo_vmware.api [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831343, 'name': PowerOnVM_Task, 'duration_secs': 0.797012} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.028059] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 623.028793] env[62914]: INFO nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Took 10.89 seconds to spawn the instance on the hypervisor. [ 623.029146] env[62914]: DEBUG nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 623.030106] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b04721-9462-41cc-90c1-424e7f74baa6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.101183] env[62914]: INFO nova.compute.resource_tracker [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating resource usage from migration 2ccde65d-d838-4157-a87e-deeb37150b7b [ 623.126262] env[62914]: DEBUG oslo_concurrency.lockutils [req-735417c3-54f0-46a9-9a5a-d2ab73bcce05 req-97a12fb9-57dd-4ee2-be0f-2f9246bdd83e service nova] Releasing lock "refresh_cache-6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.170030] env[62914]: DEBUG nova.network.neutron [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updated VIF entry in instance network info cache for port a37b3d57-45a7-4167-970b-4734a54661f8. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 623.170182] env[62914]: DEBUG nova.network.neutron [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updating instance_info_cache with network_info: [{"id": "a37b3d57-45a7-4167-970b-4734a54661f8", "address": "fa:16:3e:85:44:f6", "network": {"id": "3cc69eb5-cd59-4351-8a69-68f647db0af1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-590356650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfac7a5b4e7349688942cac59bd2adfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa37b3d57-45", "ovs_interfaceid": "a37b3d57-45a7-4167-970b-4734a54661f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.232820] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d32333a2-7689-4520-9356-4a10fac92638 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.006s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.454140] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831344, 'name': CreateVM_Task, 'duration_secs': 0.536415} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.457479] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 623.458843] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.459221] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.459742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 623.460145] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01f8bd02-eb92-467e-a5a0-4e0afca7bbe8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.466870] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 623.466870] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525cd141-4c1b-ac8e-6cd2-8bc08892d73d" [ 623.466870] env[62914]: _type = "Task" [ 623.466870] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.480364] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525cd141-4c1b-ac8e-6cd2-8bc08892d73d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.506020] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1838ed1-50b8-4e7b-95d4-1014b1b7ae60 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.514119] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c723a8-c718-4d1a-97b4-f814344becc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.552613] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ce1cc9-dd34-4a6a-8d96-d92c6a6b2267 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.558669] env[62914]: INFO nova.compute.manager [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Took 22.33 seconds to build instance. [ 623.563857] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33707c7-eb09-422a-a072-ad8fb7ac4bb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.579495] env[62914]: DEBUG nova.compute.provider_tree [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.674801] env[62914]: DEBUG oslo_concurrency.lockutils [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] Releasing lock "refresh_cache-a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.675194] env[62914]: DEBUG nova.compute.manager [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Received event network-changed-cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 623.675440] env[62914]: DEBUG nova.compute.manager [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Refreshing instance network info cache due to event network-changed-cf87f855-3a4c-43d5-a06f-db1eb5eec958. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 623.675838] env[62914]: DEBUG oslo_concurrency.lockutils [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] Acquiring lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.675977] env[62914]: DEBUG oslo_concurrency.lockutils [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] Acquired lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.676236] env[62914]: DEBUG nova.network.neutron [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Refreshing network info cache for port cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 623.979386] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525cd141-4c1b-ac8e-6cd2-8bc08892d73d, 'name': SearchDatastore_Task, 'duration_secs': 0.014155} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.979801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.980070] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.980360] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.980555] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.980768] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.981059] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b6a321a-93d9-41ab-a518-b8c9e9f266ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.992025] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.992167] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 623.993349] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f4d956-c671-4ff7-a553-6d1ba386278f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.999724] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 623.999724] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52caa70c-1ec1-2b0a-70bb-f74ec5e7d7ee" [ 623.999724] env[62914]: _type = "Task" [ 623.999724] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.009082] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52caa70c-1ec1-2b0a-70bb-f74ec5e7d7ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.060865] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ca8fd23b-0874-4abc-a0f8-dc586b2b144a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.844s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.084318] env[62914]: DEBUG nova.scheduler.client.report [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 624.499353] env[62914]: DEBUG nova.network.neutron [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Updated VIF entry in instance network info cache for port cf87f855-3a4c-43d5-a06f-db1eb5eec958. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 624.499777] env[62914]: DEBUG nova.network.neutron [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Updating instance_info_cache with network_info: [{"id": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "address": "fa:16:3e:6b:7c:d2", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf87f855-3a", "ovs_interfaceid": "cf87f855-3a4c-43d5-a06f-db1eb5eec958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.515146] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': 
session[52d52040-ced2-7a98-19c6-f97f142d02ee]52caa70c-1ec1-2b0a-70bb-f74ec5e7d7ee, 'name': SearchDatastore_Task, 'duration_secs': 0.015101} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.516697] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa5b8b2-bf98-499a-8ef5-24dd82091c96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.524838] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 624.524838] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5246d377-5358-f05e-f061-476926eb2826" [ 624.524838] env[62914]: _type = "Task" [ 624.524838] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.536588] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5246d377-5358-f05e-f061-476926eb2826, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.593011] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.510s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.593371] env[62914]: INFO nova.compute.manager [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Migrating [ 624.593688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.593861] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.596105] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.792s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.599440] env[62914]: INFO nova.compute.claims [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Claim successful on 
node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.674762] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.675315] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.946207] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Successfully updated port: 668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 625.007221] env[62914]: DEBUG oslo_concurrency.lockutils [req-131c36fe-169a-4173-a60c-0324e8960d0f req-ff52bf86-23e4-466a-a196-8553cedb6833 service nova] Releasing lock "refresh_cache-e1018767-71e4-49c9-bd4d-02eae39dc26b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.037422] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5246d377-5358-f05e-f061-476926eb2826, 'name': SearchDatastore_Task, 'duration_secs': 0.016387} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.038026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.038351] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 9ce44ae9-9369-4c0c-9d14-9c8fde42d612/9ce44ae9-9369-4c0c-9d14-9c8fde42d612.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 625.038707] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac2024c2-e2c0-4185-8dd3-3ef5885778cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.048406] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 625.048406] env[62914]: value = "task-4831345" [ 625.048406] env[62914]: _type = "Task" [ 625.048406] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.059341] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.104065] env[62914]: INFO nova.compute.rpcapi [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 625.104227] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.178533] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 625.472014] env[62914]: DEBUG nova.compute.manager [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Received event network-changed {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 625.472780] env[62914]: DEBUG nova.compute.manager [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Refreshing instance network info cache due to event network-changed. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 625.473365] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] Acquiring lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.475740] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] Acquired lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.475740] env[62914]: DEBUG nova.network.neutron [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 625.562714] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831345, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.630822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.630822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.630822] env[62914]: DEBUG nova.network.neutron [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 625.702662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.048049] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a0103b-3f0b-4bd4-bf64-465eb9a12008 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.063568] env[62914]: DEBUG nova.compute.manager [req-62bc027f-fd17-4232-9e0d-b649e79bd9ed req-1788dabc-69b6-4482-a925-409729dd86d0 service nova] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Received event network-vif-deleted-16cf2714-36d7-443b-a820-1fe738d54164 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 626.071271] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cda5049-1977-4ec2-96d9-80358a046418 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.083837] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831345, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.694664} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.083837] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 9ce44ae9-9369-4c0c-9d14-9c8fde42d612/9ce44ae9-9369-4c0c-9d14-9c8fde42d612.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 626.083837] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.083837] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f5bee9f-9c5c-4c4e-89c3-d64b9ed5319e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.124414] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a26bc69-4b22-42db-926a-5040a87e6700 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.130124] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 626.130124] env[62914]: value = "task-4831346" [ 626.130124] env[62914]: _type = "Task" [ 626.130124] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.141770] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f681f4-30ea-4761-94c6-08b5e54dcb7d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.151372] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831346, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.163130] env[62914]: DEBUG nova.compute.provider_tree [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 626.311356] env[62914]: DEBUG nova.compute.manager [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Received event network-vif-plugged-99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 626.311688] env[62914]: DEBUG oslo_concurrency.lockutils [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] Acquiring lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.312371] env[62914]: DEBUG oslo_concurrency.lockutils [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.312371] env[62914]: DEBUG oslo_concurrency.lockutils [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.312371] env[62914]: DEBUG nova.compute.manager [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] No waiting events found dispatching network-vif-plugged-99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 626.313598] env[62914]: WARNING nova.compute.manager [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Received unexpected event network-vif-plugged-99005c2d-b79b-4aba-b30d-613274dad233 for instance with vm_state building and task_state spawning. 
[ 626.313598] env[62914]: DEBUG nova.compute.manager [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Received event network-changed-99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 626.313598] env[62914]: DEBUG nova.compute.manager [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Refreshing instance network info cache due to event network-changed-99005c2d-b79b-4aba-b30d-613274dad233. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 626.313598] env[62914]: DEBUG oslo_concurrency.lockutils [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] Acquiring lock "refresh_cache-9ce44ae9-9369-4c0c-9d14-9c8fde42d612" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.313598] env[62914]: DEBUG oslo_concurrency.lockutils [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] Acquired lock "refresh_cache-9ce44ae9-9369-4c0c-9d14-9c8fde42d612" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.313769] env[62914]: DEBUG nova.network.neutron [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Refreshing network info cache for port 99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 626.412430] env[62914]: DEBUG nova.network.neutron [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Updating instance_info_cache with network_info: [{"id": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "address": "fa:16:3e:48:b6:8a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ce4bc2-c0", "ovs_interfaceid": "b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.531132] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "82aab17d-a6d0-48cf-a59a-fbef7d402894" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.531680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.643797] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232253} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.644336] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.645345] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9953cb24-cdeb-4aa2-afb2-d78c56717eaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.673160] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 9ce44ae9-9369-4c0c-9d14-9c8fde42d612/9ce44ae9-9369-4c0c-9d14-9c8fde42d612.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.677959] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-210c40d6-f29e-4c67-8919-8a1f41d3b298 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.697496] env[62914]: DEBUG nova.network.neutron [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.706762] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 626.706762] env[62914]: value = "task-4831347" [ 626.706762] env[62914]: _type = "Task" [ 626.706762] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.717975] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831347, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.727847] env[62914]: ERROR nova.scheduler.client.report [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [req-ccac3b14-4df7-48ff-8135-a060452d2b46] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f2f7a014-852b-4b37-9610-c5761f4b0175. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ccac3b14-4df7-48ff-8135-a060452d2b46"}]} [ 626.749942] env[62914]: DEBUG nova.scheduler.client.report [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 626.767862] env[62914]: DEBUG nova.scheduler.client.report [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 626.768128] env[62914]: DEBUG nova.compute.provider_tree [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 626.782222] env[62914]: DEBUG nova.scheduler.client.report [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 626.818950] env[62914]: DEBUG nova.scheduler.client.report [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 626.916515] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d39d2461-1c46-4097-b4ea-283c773d8bc1 tempest-ServerExternalEventsTest-1184783838 tempest-ServerExternalEventsTest-1184783838-project] Releasing lock "refresh_cache-ea214cc0-0f7a-4aee-9906-8d47e660c8f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.034423] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c 
tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 627.202170] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.218326] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.227914] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c59ae7e-8722-44f3-9fe9-02abe0fbde12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.236686] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c72b087-e0a5-41e2-a349-fef14232af2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.271897] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbf5e0a-5dbe-43d8-90a2-f1dc41501063 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.280550] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4dba6a-c74e-4bc0-9a91-e2033943a66a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.295588] env[62914]: DEBUG nova.compute.provider_tree [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 627.564723] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.719088] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831347, 'name': ReconfigVM_Task, 'duration_secs': 0.734867} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.719460] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 9ce44ae9-9369-4c0c-9d14-9c8fde42d612/9ce44ae9-9369-4c0c-9d14-9c8fde42d612.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.720154] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-362bb127-0c28-4b31-a111-05c799041053 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.727601] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 627.727601] env[62914]: value = "task-4831348" [ 627.727601] env[62914]: _type = "Task" [ 627.727601] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.741031] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831348, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.818337] env[62914]: DEBUG nova.network.neutron [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Updated VIF entry in instance network info cache for port 99005c2d-b79b-4aba-b30d-613274dad233. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 627.818784] env[62914]: DEBUG nova.network.neutron [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Updating instance_info_cache with network_info: [{"id": "99005c2d-b79b-4aba-b30d-613274dad233", "address": "fa:16:3e:35:72:ab", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99005c2d-b7", "ovs_interfaceid": "99005c2d-b79b-4aba-b30d-613274dad233", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.845989] env[62914]: DEBUG nova.scheduler.client.report [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updated inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with generation 32 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 627.846260] env[62914]: DEBUG nova.compute.provider_tree [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 32 to 33 during operation: update_inventory {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 627.846525] env[62914]: DEBUG nova.compute.provider_tree [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 628.245359] env[62914]: DEBUG oslo_vmware.api [None 
req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831348, 'name': Rename_Task, 'duration_secs': 0.395978} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.245635] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 628.246027] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d552546-155d-49ea-82d3-a3d7148c9952 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.255308] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 628.255308] env[62914]: value = "task-4831349" [ 628.255308] env[62914]: _type = "Task" [ 628.255308] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.264702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.264957] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.266055] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.266055] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.266055] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock 
"ea214cc0-0f7a-4aee-9906-8d47e660c8f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.267471] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831349, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.273378] env[62914]: INFO nova.compute.manager [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Terminating instance [ 628.276128] env[62914]: DEBUG nova.compute.manager [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 628.276128] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 628.276128] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae35daa6-e02c-47b8-af89-4144a61c1553 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.284362] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 628.284681] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2ddae57-6f94-49fe-a828-570f141edb7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.291924] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 628.291924] env[62914]: value = "task-4831350" [ 628.291924] env[62914]: _type = "Task" [ 628.291924] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.295965] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Successfully updated port: a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.303851] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.321487] env[62914]: DEBUG oslo_concurrency.lockutils [req-c1530f74-1030-4cd8-a3d7-34d05f344c4d req-a53ea83b-8504-48f6-84eb-737680585185 service nova] Releasing lock "refresh_cache-9ce44ae9-9369-4c0c-9d14-9c8fde42d612" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.351532] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.352779] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.353186] env[62914]: DEBUG nova.compute.manager [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 628.354148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.758s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.354820] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 628.359399] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813fe097-b71c-4ed2-b576-efd965dae32c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.363866] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.336s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.365673] env[62914]: INFO nova.compute.claims [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.374720] env[62914]: DEBUG nova.compute.manager [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 628.375403] env[62914]: DEBUG nova.objects.instance [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lazy-loading 'flavor' on Instance uuid 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 628.721159] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c0a046-2ee8-49fa-981f-65bc2c47299f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.744582] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 628.767148] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831349, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.813230] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831350, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.861258] env[62914]: DEBUG nova.compute.utils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.863043] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 628.863366] env[62914]: DEBUG nova.network.neutron [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 628.880943] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 628.883965] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68ae757b-eb2a-4cc9-b034-c7bc210b9bad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.893317] env[62914]: DEBUG oslo_vmware.api [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 628.893317] env[62914]: value = "task-4831351" [ 628.893317] env[62914]: _type = "Task" [ 628.893317] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.905867] env[62914]: DEBUG oslo_vmware.api [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831351, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.051167] env[62914]: DEBUG nova.policy [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f79b04d732dd48d09719d422aa9137d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '087c2e0c12a8423cace93462477bb622', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 629.254683] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 629.257348] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdd8465b-34cb-4500-b2d8-30bab2615a26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.274139] env[62914]: DEBUG oslo_vmware.api [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831349, 'name': PowerOnVM_Task, 'duration_secs': 0.916064} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.275793] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 629.276038] env[62914]: INFO nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Took 9.90 seconds to spawn the instance on the hypervisor. [ 629.276604] env[62914]: DEBUG nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 629.276790] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 629.276790] env[62914]: value = "task-4831352" [ 629.276790] env[62914]: _type = "Task" [ 629.276790] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.277410] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405bc72d-a57c-4115-97e6-fb4c3d855ff3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.301232] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831352, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.319176] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831350, 'name': PowerOffVM_Task, 'duration_secs': 0.551335} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.319176] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 629.319176] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 629.319176] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e449265c-2e18-4f1d-9f81-997b705ff9f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.369030] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 629.409078] env[62914]: DEBUG oslo_vmware.api [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831351, 'name': PowerOffVM_Task, 'duration_secs': 0.210997} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.410039] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 629.410039] env[62914]: DEBUG nova.compute.manager [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 629.410580] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652bed21-ac50-4f4e-8522-a02bf68acbe3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.790788] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831352, 'name': PowerOffVM_Task, 'duration_secs': 0.35151} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.791186] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 629.791263] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 629.811958] env[62914]: INFO nova.compute.manager [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Took 23.98 seconds to build instance. 
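Editor's note: the entries for task-4831347 through task-4831354 above all follow the same oslo.vmware pattern — a vSphere task is started through the SOAP client ("Invoking VirtualMachine.PowerOffVM_Task …"), then wait_for_task() polls it, which is what produces the recurring "Waiting for the task … progress is N% … completed successfully" records from wait_for_task/_poll_task in oslo_vmware/api.py. Below is a minimal sketch of that pattern used directly against oslo.vmware, not Nova's driver code; the vCenter host, credentials, and the 'vm-12345' managed-object ID are placeholders for illustration only.

    # Sketch of the invoke-then-poll pattern seen in the PowerOffVM_Task /
    # ReconfigVM_Task entries above. Host, credentials and the 'vm-12345'
    # moref are placeholder values, not taken from this log.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder vCenter endpoint and credentials
        api_retry_count=10,
        task_poll_interval=0.5)                # seconds between polls (placeholder value)

    # Resolve a VM managed object reference (placeholder ID) and start a task.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() blocks, polling the task until it succeeds (or raising if
    # it errors), which is what the "progress is N% ... completed successfully"
    # sequence in the log corresponds to.
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once the task completes

Nova's vmwareapi driver wraps these same calls (for example power_off_instance in nova/virt/vmwareapi/vm_util.py, visible in the records above), which is why each power-off or reconfigure shows up as an "Invoking …" line, a "Waiting for the task" block, and a completion record.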
[ 629.884549] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f294ad52-82d0-4060-ae85-0ebb0361df4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.893209] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fc4af4-dfaf-4946-a0f3-5b6719d67851 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.929122] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b1a995-e92d-4a90-9cfa-0614fb795676 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.935440] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ed379321-a5d1-40f1-a3be-15498f5efa90 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.583s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.940122] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81df546d-c89b-4f69-9c8e-92016cbfad0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.956807] env[62914]: DEBUG nova.compute.provider_tree [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.087802] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Received event network-changed-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 630.088064] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Refreshing instance network info cache due to event network-changed-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 630.088294] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Acquiring lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.088440] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Acquired lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.088636] env[62914]: DEBUG nova.network.neutron [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Refreshing network info cache for port 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 630.123210] env[62914]: DEBUG nova.compute.manager [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Received event network-changed-fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 630.123414] env[62914]: DEBUG nova.compute.manager [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Refreshing instance network info cache due to event network-changed-fc7b353a-564b-4bbe-b0e1-85f5f54f7092. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 630.124500] env[62914]: DEBUG oslo_concurrency.lockutils [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] Acquiring lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.124500] env[62914]: DEBUG oslo_concurrency.lockutils [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] Acquired lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.124500] env[62914]: DEBUG nova.network.neutron [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Refreshing network info cache for port fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 630.269010] env[62914]: DEBUG nova.network.neutron [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Successfully created port: 510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 630.298429] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 630.298899] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 630.298899] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.299076] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 630.299238] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.299357] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 630.299888] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 630.299888] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 630.300052] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 630.300116] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 630.300263] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 630.305475] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18931638-542a-483b-8345-6e18c28b3949 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.316979] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60e2f5c0-21ad-4eb9-9ec5-1228db55bb8d tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.501s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.323795] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 630.323795] env[62914]: value = "task-4831354" [ 630.323795] env[62914]: _type = "Task" [ 630.323795] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.334214] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831354, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.379778] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 630.421044] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 630.421044] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 630.421044] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.421044] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 630.421296] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.421296] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 630.421376] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 630.422557] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 630.422557] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 
tempest-ServerTagsTestJSON-331705066-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 630.423164] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 630.423164] env[62914]: DEBUG nova.virt.hardware [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 630.424036] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d516bd1-f73d-45c0-8b9f-e1cc1873e891 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.434930] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d65bb5-e39e-431a-87ff-55e916dedf6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.460405] env[62914]: DEBUG nova.scheduler.client.report [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.837744] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831354, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.973705] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.973705] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 630.975788] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.519s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.977045] env[62914]: INFO nova.compute.claims [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.162008] env[62914]: DEBUG nova.network.neutron [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updated VIF entry in instance network info cache for port 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 631.162366] env[62914]: DEBUG nova.network.neutron [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updating instance_info_cache with network_info: [{"id": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "address": "fa:16:3e:ec:dc:0a", "network": {"id": "ba7c830d-c4a1-418f-a747-d997a634b13f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1412355570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33e55050ffe94a588a5db112563b5555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e7a9bbc-61", "ovs_interfaceid": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.232104] env[62914]: DEBUG nova.network.neutron [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updated VIF entry in instance network info cache for port fc7b353a-564b-4bbe-b0e1-85f5f54f7092. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 631.232104] env[62914]: DEBUG nova.network.neutron [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updating instance_info_cache with network_info: [{"id": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "address": "fa:16:3e:2f:34:dc", "network": {"id": "e8c1428c-2477-4bff-b70c-eb461db96ea5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-779065668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a83acb637b5c47f395d677ee48e37dae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7b353a-56", "ovs_interfaceid": "fc7b353a-564b-4bbe-b0e1-85f5f54f7092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.346496] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831354, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.483275] env[62914]: DEBUG nova.compute.utils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.490439] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 631.490819] env[62914]: DEBUG nova.network.neutron [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 631.542246] env[62914]: DEBUG nova.policy [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a8cfcd0aed9499a83c09052328647cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '894c73ea90624428afeb1165afbbfa9c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 631.616958] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 631.616958] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 631.616958] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Deleting the datastore file [datastore1] ea214cc0-0f7a-4aee-9906-8d47e660c8f7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 631.616958] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93423b24-97e1-4b39-b76b-f5d74161d5ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.625420] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for the task: (returnval){ [ 631.625420] env[62914]: value = "task-4831355" [ 631.625420] env[62914]: _type = "Task" [ 631.625420] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.641314] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831355, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.669365] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Releasing lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.669365] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-vif-plugged-668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 631.669365] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Acquiring lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.669365] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.669365] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.669639] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] No waiting events found dispatching network-vif-plugged-668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 631.669639] env[62914]: WARNING nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received unexpected event network-vif-plugged-668a09a1-427d-4507-b7d0-45cab066cac8 for instance with vm_state building and task_state spawning. [ 631.669639] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-changed-668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 631.669639] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Refreshing instance network info cache due to event network-changed-668a09a1-427d-4507-b7d0-45cab066cac8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 631.669800] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Acquiring lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.669940] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Acquired lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.670118] env[62914]: DEBUG nova.network.neutron [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Refreshing network info cache for port 668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 631.738919] env[62914]: DEBUG oslo_concurrency.lockutils [req-137863b6-628d-4bc3-a758-f7f1d6768614 req-dd39eb83-0aa9-44db-b138-d20d9ee98d68 service nova] Releasing lock "refresh_cache-e69c36e9-3c59-48e3-9962-ffe8de10a789" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.839797] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831354, 'name': ReconfigVM_Task, 'duration_secs': 1.037718} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.839797] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 631.957899] env[62914]: DEBUG nova.network.neutron [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Successfully created port: 1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.005891] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 632.141593] env[62914]: DEBUG oslo_vmware.api [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Task: {'id': task-4831355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475902} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.142326] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 632.142326] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 632.142432] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 632.143592] env[62914]: INFO nova.compute.manager [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Took 3.87 seconds to destroy the instance on the hypervisor. [ 632.143592] env[62914]: DEBUG oslo.service.loopingcall [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 632.149497] env[62914]: DEBUG nova.compute.manager [-] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 632.149625] env[62914]: DEBUG nova.network.neutron [-] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 632.174375] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Successfully updated port: a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 632.293527] env[62914]: DEBUG nova.network.neutron [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.352027] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.352027] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.352027] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.352698] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.355903] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.355903] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.355903] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.355903] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.355903] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} 
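The nova.virt.hardware lines above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with no hw:cpu_* limits or preferences (the "0:0:0" lines) and the default 65536-wide maximums, the only factorisation of one vCPU is 1 socket x 1 core x 1 thread, which is why the log reports exactly one possible topology. The sketch below is an illustrative reconstruction of that selection step under those assumptions; it is not Nova's hardware.py code, and the helper names and defaults are hypothetical.

```python
# Illustrative sketch (not Nova's implementation) of why a 1-vCPU flavor with
# no explicit topology settings collapses to the single topology 1:1:1.
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate every sockets*cores*threads factorisation of `vcpus`
    that stays within the per-dimension maximums (65536 mirrors the
    'limits were sockets=65536, cores=65536, threads=65536' log line)."""
    topos = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            topos.append(Topology(s, c, t))
    return topos


def sort_by_preference(topos, preferred=Topology(0, 0, 0)):
    """Rank candidates that match any non-zero preferred dimension first;
    with the 0:0:0 preference seen in the log the order is unchanged."""
    def score(t):
        return sum((preferred.sockets and t.sockets == preferred.sockets,
                    preferred.cores and t.cores == preferred.cores,
                    preferred.threads and t.threads == preferred.threads))
    return sorted(topos, key=score, reverse=True)


if __name__ == "__main__":
    candidates = possible_topologies(vcpus=1)
    print(sort_by_preference(candidates))  # [Topology(sockets=1, cores=1, threads=1)]
```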
[ 632.357542] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.357542] env[62914]: DEBUG nova.virt.hardware [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.364244] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Reconfiguring VM instance instance-00000007 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 632.364244] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bf0a01d-ab2c-404f-8ee9-a6fe25aff396 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.400866] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 632.400866] env[62914]: value = "task-4831356" [ 632.400866] env[62914]: _type = "Task" [ 632.400866] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.410926] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831356, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.524260] env[62914]: DEBUG nova.network.neutron [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.568139] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392363ad-e52c-4343-b3c8-3687a4e19c2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.581426] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68783a6-bea4-42d1-9106-695fc99804a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.631954] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ec63f1-5c46-4183-8718-8de7661ba733 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.642671] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce13a58-e766-48c2-a2d2-1670c5429a2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.661046] env[62914]: DEBUG nova.compute.provider_tree [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.683220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.913972] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831356, 'name': ReconfigVM_Task, 'duration_secs': 0.194282} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.914452] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Reconfigured VM instance instance-00000007 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 632.915356] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953ad5e8-f477-424f-8c59-3cf4823944b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.946097] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5/aede8da7-8bf2-4963-b08b-6e06007614a5.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.946907] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-090a94bf-b6d6-4e60-ace6-b42441b6de5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.967692] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 632.967692] env[62914]: value = "task-4831357" [ 632.967692] env[62914]: _type = "Task" [ 632.967692] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.977421] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.020710] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 633.028531] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Releasing lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.030029] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-vif-plugged-a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 633.030214] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Acquiring lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.030379] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.030553] env[62914]: DEBUG oslo_concurrency.lockutils [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.030736] env[62914]: DEBUG nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] No waiting events found dispatching network-vif-plugged-a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 633.030937] env[62914]: WARNING nova.compute.manager [req-d15a7c6b-bb8f-494c-9729-7913052d1758 req-3a71d8ef-0453-432f-8d51-de8db98ee093 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received unexpected event network-vif-plugged-a1cdb314-5a29-443e-8562-bced871a8df1 for instance with vm_state building and task_state spawning. 
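The req-d15a7c6b lines above show the external-event path: the compute manager takes a per-instance "<uuid>-events" lock, looks for a waiter registered for network-vif-plugged-a1cdb314-..., finds none ("No waiting events found dispatching ..."), and logs the "Received unexpected event" warning because the instance is still building. The following is a minimal, hypothetical sketch of that register/pop/dispatch pattern; it is not the nova.compute.manager.InstanceEvents implementation, and every class and method name here is an illustrative assumption.

```python
# Illustrative sketch of a per-instance event waiter/dispatcher, mirroring the
# lock / pop / "unexpected event" WARNING sequence visible in the log above.
import threading
from collections import defaultdict


class InstanceEventDispatcher:
    def __init__(self):
        self._lock = threading.Lock()       # stands in for the "<uuid>-events" lock
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_tag: threading.Event}

    def prepare_for_event(self, instance_uuid, event_tag):
        """Register interest in an event such as 'network-vif-plugged-<port>'."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_tag] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_tag):
        """Pop a registered waiter, or None if nobody was waiting for it."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_tag, None)

    def external_instance_event(self, instance_uuid, event_tag, vm_state, task_state):
        waiter = self.pop_instance_event(instance_uuid, event_tag)
        if waiter is None:
            # Matches the WARNING path in the log: the event arrived before
            # anything blocked on it, so it is reported as unexpected.
            print(f"WARNING: unexpected event {event_tag} for instance "
                  f"with vm_state {vm_state} and task_state {task_state}")
        else:
            waiter.set()                    # wake whoever is blocked on the event


if __name__ == "__main__":
    d = InstanceEventDispatcher()
    # No waiter was registered, so this takes the same path as the log's WARNING:
    d.external_instance_event("ef521e82-38ab-4d62-b434-da7f7fa8c50f",
                              "network-vif-plugged-a1cdb314-5a29-443e-8562-bced871a8df1",
                              "building", "spawning")
```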
[ 633.031409] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.031506] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.068853] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.069124] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.069289] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.069507] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.069966] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.070675] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.071321] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.071384] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 633.071539] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.071722] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.072022] env[62914]: DEBUG nova.virt.hardware [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.076031] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f9ee87-46de-43b1-be32-da533f8bb218 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.087550] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c49c6a6-ad59-4f20-b1a3-19c9f5c4e3ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.168486] env[62914]: DEBUG nova.scheduler.client.report [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 633.478932] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831357, 'name': ReconfigVM_Task, 'duration_secs': 0.509364} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.479302] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Reconfigured VM instance instance-00000007 to attach disk [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5/aede8da7-8bf2-4963-b08b-6e06007614a5.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.479645] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 633.483960] env[62914]: DEBUG nova.network.neutron [-] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.580565] env[62914]: DEBUG nova.compute.manager [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 633.585852] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d53d9f-9ffc-49db-b3f4-90b62f1a1146 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.601373] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.676360] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.677524] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 633.690071] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.183s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.690442] env[62914]: DEBUG nova.objects.instance [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lazy-loading 'resources' on Instance uuid db31a794-3928-41bb-afd8-14fae9357654 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 633.939173] env[62914]: DEBUG nova.network.neutron [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Successfully updated port: 510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 633.987575] env[62914]: INFO nova.compute.manager [-] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Took 1.84 seconds to deallocate network for instance. [ 633.991739] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae24c24-2436-489a-8c44-ca012aed6983 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.021581] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f664acbc-06d7-4616-84cc-eb84cdbd1309 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.043913] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 634.103075] env[62914]: INFO nova.compute.manager [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] instance snapshotting [ 634.103290] env[62914]: WARNING nova.compute.manager [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 634.106179] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4c442f-ca35-4ffc-af9a-914652ee2bde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.136247] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc1f4cf-98b7-4e08-807b-ff7e3c6c7d03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.192311] env[62914]: DEBUG nova.compute.utils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 
tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 634.196147] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 634.196431] env[62914]: DEBUG nova.network.neutron [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 634.289081] env[62914]: DEBUG nova.policy [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e291489da35649d0a2c69f98714d89ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14ea39ac6e2d400ca89bbffc20d764ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 634.443644] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "refresh_cache-ff2cff97-1671-4f97-8f69-532253169ff8" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.443644] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquired lock "refresh_cache-ff2cff97-1671-4f97-8f69-532253169ff8" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.443644] env[62914]: DEBUG nova.network.neutron [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.504967] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.563642] env[62914]: DEBUG nova.network.neutron [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Successfully updated port: 1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 634.617418] env[62914]: DEBUG nova.network.neutron [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updating instance_info_cache with network_info: [{"id": "668a09a1-427d-4507-b7d0-45cab066cac8", "address": "fa:16:3e:30:a2:8d", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap668a09a1-42", "ovs_interfaceid": "668a09a1-427d-4507-b7d0-45cab066cac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1cdb314-5a29-443e-8562-bced871a8df1", "address": "fa:16:3e:46:47:35", "network": {"id": "f2647c62-0d1d-4453-a988-588ff12a0d92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020261419", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1cdb314-5a", "ovs_interfaceid": "a1cdb314-5a29-443e-8562-bced871a8df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "address": "fa:16:3e:bd:4b:0b", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", 
"segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2583c61-5b", "ovs_interfaceid": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.645230] env[62914]: DEBUG nova.network.neutron [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Port ae6db457-8035-4a28-bf52-7113144cfe11 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 634.653210] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 634.654103] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f8d8aea0-d966-4c92-891f-67c423d476cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.665509] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 634.665509] env[62914]: value = "task-4831358" [ 634.665509] env[62914]: _type = "Task" [ 634.665509] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.682035] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831358, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.701952] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9921b71-d626-4fae-bc77-688418e37578 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.705598] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 634.714682] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840b4a8c-304f-4761-8150-c38884b2f5e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.753625] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e569702b-3115-4234-9b1b-0454b41f32fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.764564] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b509f5d3-6b50-49b7-9cde-0aa84afe31aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.780930] env[62914]: DEBUG nova.compute.provider_tree [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.831169] env[62914]: DEBUG nova.compute.manager [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-changed-a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 634.831694] env[62914]: DEBUG nova.compute.manager [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Refreshing instance network info cache due to event network-changed-a1cdb314-5a29-443e-8562-bced871a8df1. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 634.831694] env[62914]: DEBUG oslo_concurrency.lockutils [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] Acquiring lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.837890] env[62914]: DEBUG nova.network.neutron [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Successfully created port: 90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.850941] env[62914]: DEBUG nova.compute.manager [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-changed-b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 634.850941] env[62914]: DEBUG nova.compute.manager [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Refreshing instance network info cache due to event network-changed-b92603ac-8bea-4f9a-aa50-8c942106916d. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 634.850941] env[62914]: DEBUG oslo_concurrency.lockutils [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] Acquiring lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.850941] env[62914]: DEBUG oslo_concurrency.lockutils [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] Acquired lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.851741] env[62914]: DEBUG nova.network.neutron [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Refreshing network info cache for port b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.038223] env[62914]: DEBUG nova.network.neutron [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.076658] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.077067] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.077364] env[62914]: DEBUG nova.network.neutron [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 635.100673] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49397a7a-a973-40fb-bad0-9dbf157432b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.110972] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Suspending the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 635.110972] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-12a5ee07-b1d1-477a-9af2-40d5fe1fd5be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.124421] env[62914]: DEBUG oslo_concurrency.lockutils 
[None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Releasing lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.124959] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Instance network_info: |[{"id": "668a09a1-427d-4507-b7d0-45cab066cac8", "address": "fa:16:3e:30:a2:8d", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap668a09a1-42", "ovs_interfaceid": "668a09a1-427d-4507-b7d0-45cab066cac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1cdb314-5a29-443e-8562-bced871a8df1", "address": "fa:16:3e:46:47:35", "network": {"id": "f2647c62-0d1d-4453-a988-588ff12a0d92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020261419", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1cdb314-5a", "ovs_interfaceid": "a1cdb314-5a29-443e-8562-bced871a8df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "address": "fa:16:3e:bd:4b:0b", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2583c61-5b", "ovs_interfaceid": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 635.125497] env[62914]: DEBUG oslo_vmware.api [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] Waiting for the task: (returnval){ [ 635.125497] env[62914]: value = "task-4831359" [ 635.125497] env[62914]: _type = "Task" [ 635.125497] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.125850] env[62914]: DEBUG oslo_concurrency.lockutils [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] Acquired lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.126157] env[62914]: DEBUG nova.network.neutron [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Refreshing network info cache for port a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.127940] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:a2:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '668a09a1-427d-4507-b7d0-45cab066cac8', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:47:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0721b358-3768-472d-95f8-6d6755ab1635', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1cdb314-5a29-443e-8562-bced871a8df1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:4b:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2583c61-5b1a-4a33-8206-4f81fca1b131', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 635.140362] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Creating folder: Project (7da23e8d3c044f178c224a3e40a346a3). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 635.143666] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0d2b104-109c-4a64-9c8b-0c1d2705ac00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.161245] env[62914]: DEBUG oslo_vmware.api [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] Task: {'id': task-4831359, 'name': SuspendVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.163069] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Created folder: Project (7da23e8d3c044f178c224a3e40a346a3) in parent group-v941773. [ 635.163343] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Creating folder: Instances. Parent ref: group-v941826. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 635.163864] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f14c1028-9d16-4347-b59d-b4d4543e0b83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.178853] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831358, 'name': CreateSnapshot_Task, 'duration_secs': 0.454177} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.185248] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 635.185767] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Created folder: Instances in parent group-v941826. [ 635.186111] env[62914]: DEBUG oslo.service.loopingcall [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.187256] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3849b0-b64f-4fbd-b725-bf6af870264b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.191466] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 635.193038] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1aa758b9-a6e3-483d-a954-497efa09e220 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.240911] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 635.240911] env[62914]: value = "task-4831362" [ 635.240911] env[62914]: _type = "Task" [ 635.240911] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.250735] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831362, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.286970] env[62914]: DEBUG nova.scheduler.client.report [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.488606] env[62914]: DEBUG nova.compute.manager [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 635.489734] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005378e8-f077-4175-ae67-11bb70d7c12f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.510163] env[62914]: DEBUG nova.network.neutron [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Updating instance_info_cache with network_info: [{"id": "510995e6-4d3c-4b63-ae4e-d4c1f74254e8", "address": "fa:16:3e:05:02:e6", "network": {"id": "ee66e17c-2591-46b1-8585-3840b7e234ee", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1629786297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087c2e0c12a8423cace93462477bb622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap510995e6-4d", "ovs_interfaceid": "510995e6-4d3c-4b63-ae4e-d4c1f74254e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.652247] env[62914]: DEBUG oslo_vmware.api [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] Task: {'id': task-4831359, 'name': SuspendVM_Task} progress is 58%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.675558] env[62914]: DEBUG nova.network.neutron [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.687160] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.687160] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.687160] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.740484] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 635.749245] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 635.750448] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2dbbbe2-aa1b-49b2-ac6d-66c43e06b7b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.769474] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831362, 'name': CreateVM_Task, 'duration_secs': 0.514234} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.770231] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 635.771027] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 635.771027] env[62914]: value = "task-4831363" [ 635.771027] env[62914]: _type = "Task" [ 635.771027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.771816] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.772718] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.772718] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 635.773059] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdab01b3-2d6b-4b7b-b9ee-32d4615f30b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.790379] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 635.790379] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 635.790379] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 635.790379] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 635.790379] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 635.792076] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 635.792450] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 635.792639] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 635.792815] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 635.792986] env[62914]: DEBUG nova.virt.hardware 
[None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 635.793200] env[62914]: DEBUG nova.virt.hardware [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 635.794048] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.802121] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96201cc7-95fc-4f95-8cd3-906ba0338ca9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.815023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.910s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.815023] env[62914]: INFO nova.compute.claims [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.817233] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 635.817233] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521bec0e-a428-7818-c7fd-f840b2ab4e38" [ 635.817233] env[62914]: _type = "Task" [ 635.817233] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.817561] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831363, 'name': CloneVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.818795] env[62914]: DEBUG nova.network.neutron [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updated VIF entry in instance network info cache for port a1cdb314-5a29-443e-8562-bced871a8df1. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 635.820338] env[62914]: DEBUG nova.network.neutron [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updating instance_info_cache with network_info: [{"id": "668a09a1-427d-4507-b7d0-45cab066cac8", "address": "fa:16:3e:30:a2:8d", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap668a09a1-42", "ovs_interfaceid": "668a09a1-427d-4507-b7d0-45cab066cac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1cdb314-5a29-443e-8562-bced871a8df1", "address": "fa:16:3e:46:47:35", "network": {"id": "f2647c62-0d1d-4453-a988-588ff12a0d92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020261419", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1cdb314-5a", "ovs_interfaceid": "a1cdb314-5a29-443e-8562-bced871a8df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "address": "fa:16:3e:bd:4b:0b", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", 
"segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2583c61-5b", "ovs_interfaceid": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.845155] env[62914]: INFO nova.scheduler.client.report [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Deleted allocations for instance db31a794-3928-41bb-afd8-14fae9357654 [ 635.848378] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f6807f-bef5-4491-8a16-4de43a19266b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.862681] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521bec0e-a428-7818-c7fd-f840b2ab4e38, 'name': SearchDatastore_Task, 'duration_secs': 0.019152} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.863949] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.864355] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.864973] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.865249] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.865787] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.865787] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-6e246af2-cd0d-4d28-805b-f074ea3bffce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.888347] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.888683] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 635.889546] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7ae6e91-a8b7-436c-9884-977d09a15e3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.896865] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 635.896865] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cd12c3-9c20-0730-e71c-93bee11dd13b" [ 635.896865] env[62914]: _type = "Task" [ 635.896865] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.907288] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cd12c3-9c20-0730-e71c-93bee11dd13b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.986125] env[62914]: DEBUG nova.network.neutron [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updated VIF entry in instance network info cache for port b92603ac-8bea-4f9a-aa50-8c942106916d. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 635.986125] env[62914]: DEBUG nova.network.neutron [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.014392] env[62914]: INFO nova.compute.manager [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] instance snapshotting [ 636.016478] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Releasing lock "refresh_cache-ff2cff97-1671-4f97-8f69-532253169ff8" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.016793] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Instance network_info: |[{"id": "510995e6-4d3c-4b63-ae4e-d4c1f74254e8", "address": "fa:16:3e:05:02:e6", "network": {"id": "ee66e17c-2591-46b1-8585-3840b7e234ee", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1629786297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087c2e0c12a8423cace93462477bb622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap510995e6-4d", 
"ovs_interfaceid": "510995e6-4d3c-4b63-ae4e-d4c1f74254e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 636.017580] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:02:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '132fdc50-e144-4a9b-8d77-6378eec02d9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '510995e6-4d3c-4b63-ae4e-d4c1f74254e8', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.025738] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Creating folder: Project (087c2e0c12a8423cace93462477bb622). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 636.026415] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0a9891-a7e5-4d7b-8856-36baaa1a04e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.030132] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c5fcf63-83c6-4f4f-8743-bd4c055cab1c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.059163] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889a79fd-8147-4db2-93b2-13ccec0f453e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.061222] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Created folder: Project (087c2e0c12a8423cace93462477bb622) in parent group-v941773. [ 636.061432] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Creating folder: Instances. Parent ref: group-v941830. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 636.061720] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0f3b9b0-8fc1-4c14-b228-76938ad82f34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.075461] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Created folder: Instances in parent group-v941830. [ 636.075874] env[62914]: DEBUG oslo.service.loopingcall [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.076026] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 636.077272] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1dfe4965-4ad6-40a3-847f-1a46638de2fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.096695] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.096695] env[62914]: value = "task-4831366" [ 636.096695] env[62914]: _type = "Task" [ 636.096695] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.107787] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831366, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.113774] env[62914]: DEBUG nova.network.neutron [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.151714] env[62914]: DEBUG oslo_vmware.api [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] Task: {'id': task-4831359, 'name': SuspendVM_Task, 'duration_secs': 0.756546} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.152035] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Suspended the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 636.152230] env[62914]: DEBUG nova.compute.manager [None req-1cf5e62f-3d47-4217-9b54-824641185798 tempest-ServersAdminNegativeTestJSON-2035012517 tempest-ServersAdminNegativeTestJSON-2035012517-project-admin] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 636.153200] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1c0fae-c2e8-4e7a-b1cd-e139b0bdcb8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.290298] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "d8d08c36-bec2-4117-9352-8e148d25dc9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.290298] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.291380] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831363, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.331458] env[62914]: DEBUG oslo_concurrency.lockutils [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] Releasing lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.333436] env[62914]: DEBUG nova.compute.manager [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-vif-plugged-a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 636.333436] env[62914]: DEBUG oslo_concurrency.lockutils [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] Acquiring lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.333436] env[62914]: DEBUG oslo_concurrency.lockutils [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.333436] env[62914]: DEBUG oslo_concurrency.lockutils [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.333436] env[62914]: DEBUG nova.compute.manager [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] No waiting events found dispatching network-vif-plugged-a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 636.333436] env[62914]: WARNING nova.compute.manager [req-ac1eaecd-57bc-4f8d-9c9d-a613ad2dbab9 req-a490dc82-0b83-430a-a5a6-89389fc2635b service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received unexpected event network-vif-plugged-a2583c61-5b1a-4a33-8206-4f81fca1b131 for instance with vm_state building and task_state spawning. 
[ 636.333436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.333436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.368331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1e9492e3-c61f-4e52-a70f-f3e61cff29ed tempest-ServerDiagnosticsTest-352645101 tempest-ServerDiagnosticsTest-352645101-project-member] Lock "db31a794-3928-41bb-afd8-14fae9357654" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.944s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.410801] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cd12c3-9c20-0730-e71c-93bee11dd13b, 'name': SearchDatastore_Task, 'duration_secs': 0.011783} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.411792] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2559005-9424-47d7-906b-1611022adb0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.422629] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.422942] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.426058] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 636.426058] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52109081-3416-60be-d0b8-be531fadda53" [ 636.426058] env[62914]: _type = "Task" [ 636.426058] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.440328] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52109081-3416-60be-d0b8-be531fadda53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.487750] env[62914]: DEBUG oslo_concurrency.lockutils [req-eacf4ad2-81e8-4307-b6ff-71028e90da31 req-3420b363-364e-4942-a05b-70e888f30d9e service nova] Releasing lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.573169] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 636.573484] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-60a266a5-1dea-4ada-a0b8-03315b124ba6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.582766] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 636.582766] env[62914]: value = "task-4831367" [ 636.582766] env[62914]: _type = "Task" [ 636.582766] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.594894] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831367, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.611190] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831366, 'name': CreateVM_Task, 'duration_secs': 0.377812} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.611190] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 636.611809] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.611883] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.612303] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 636.612586] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd198dc2-8623-4844-bba6-3d8277ccaf67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.617721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.617944] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Instance network_info: |[{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 636.619871] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:c3:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c22c510-e137-4ee3-8038-3b784a81e04f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.631213] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating folder: Project (894c73ea90624428afeb1165afbbfa9c). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 636.631770] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 636.631770] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52982595-4a0a-6561-2426-7b4863c90109" [ 636.631770] env[62914]: _type = "Task" [ 636.631770] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.632092] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a1c5faa-fe79-44f1-a9bc-c0e367592116 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.644236] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52982595-4a0a-6561-2426-7b4863c90109, 'name': SearchDatastore_Task, 'duration_secs': 0.015236} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.644556] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.644805] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 636.645085] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.651326] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created folder: Project (894c73ea90624428afeb1165afbbfa9c) in parent group-v941773. [ 636.651530] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating folder: Instances. Parent ref: group-v941833. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 636.651800] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01bf1ec5-0449-4b2a-9e13-01a624a5e18a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.664144] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created folder: Instances in parent group-v941833. [ 636.664428] env[62914]: DEBUG oslo.service.loopingcall [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.664648] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 636.667533] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b086d4cf-3abe-4c46-ac83-8eaa2450fdab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.690586] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.690586] env[62914]: value = "task-4831370" [ 636.690586] env[62914]: _type = "Task" [ 636.690586] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.705167] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831370, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.786125] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831363, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.787751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.788018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.788212] env[62914]: DEBUG nova.network.neutron [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 636.792448] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 636.840823] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 636.939343] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52109081-3416-60be-d0b8-be531fadda53, 'name': SearchDatastore_Task, 'duration_secs': 0.011541} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.942400] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.942703] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ef521e82-38ab-4d62-b434-da7f7fa8c50f/ef521e82-38ab-4d62-b434-da7f7fa8c50f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 636.943837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.944057] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.944315] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b47c350-93a8-4960-960e-7d76bc17cb32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.947495] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b48d1d41-1c3d-46d0-9eb4-f4736485412f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.956856] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 636.956856] env[62914]: value = "task-4831371" [ 636.956856] env[62914]: _type = "Task" [ 636.956856] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.967675] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.967883] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 636.969560] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0394aa15-fc65-445a-80f1-dc525df55482 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.976689] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831371, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.982610] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 636.982610] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d94f12-6fa5-39af-faed-135ad4ad1358" [ 636.982610] env[62914]: _type = "Task" [ 636.982610] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.997802] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d94f12-6fa5-39af-faed-135ad4ad1358, 'name': SearchDatastore_Task, 'duration_secs': 0.013075} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.999019] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a0c29e-1be9-44f8-9d82-4c4274983ddc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.009408] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 637.009408] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526d7c87-ea9a-9105-eb0a-ad03f34e29c9" [ 637.009408] env[62914]: _type = "Task" [ 637.009408] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.021056] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526d7c87-ea9a-9105-eb0a-ad03f34e29c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.095985] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831367, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.209377] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831370, 'name': CreateVM_Task, 'duration_secs': 0.357786} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.212693] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 637.213653] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.213821] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.214146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 637.214405] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad84ae99-af91-4718-a9cb-dacf6ec44187 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.220447] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 637.220447] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eacace-92d7-e456-5995-3f1ea25b5cff" [ 637.220447] env[62914]: _type = "Task" [ 637.220447] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.232024] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eacace-92d7-e456-5995-3f1ea25b5cff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.287722] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831363, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.319846] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.342351] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e34831-367b-4f9e-91a5-59c5bc336488 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.360000] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7acfef2-dba8-4557-afa0-9be4aa692dd9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.402376] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.403882] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa5738f-016a-4fb6-80c7-b46831375c3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.414275] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da39695-ac7e-4912-b39c-97a4388b7bcb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.430742] env[62914]: DEBUG nova.compute.provider_tree [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.469092] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831371, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.522773] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526d7c87-ea9a-9105-eb0a-ad03f34e29c9, 'name': SearchDatastore_Task, 'duration_secs': 0.021276} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.523265] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.523265] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ff2cff97-1671-4f97-8f69-532253169ff8/ff2cff97-1671-4f97-8f69-532253169ff8.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 637.525228] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28cd7e6d-da95-40c3-88ff-84314c9853e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.535022] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 637.535022] env[62914]: value = "task-4831372" [ 637.535022] env[62914]: _type = "Task" [ 637.535022] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.547147] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.598278] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831367, 'name': CreateSnapshot_Task, 'duration_secs': 0.811134} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.598670] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 637.599534] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2c9765-082a-4ae5-8c34-1f17bb4935d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.740630] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eacace-92d7-e456-5995-3f1ea25b5cff, 'name': SearchDatastore_Task, 'duration_secs': 0.010294} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.741601] env[62914]: DEBUG nova.network.neutron [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Successfully updated port: 90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 637.742870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.743122] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.743366] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.748022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.748022] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.748022] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a5ed572-e0f2-4af9-921f-603119695e0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.761267] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.761267] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 637.761377] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebbbab7c-acca-4162-b570-66273c8bda30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.773867] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 637.773867] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52837d48-2df3-752f-305b-c7e405ae3e41" [ 637.773867] env[62914]: _type = "Task" [ 637.773867] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.786538] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52837d48-2df3-752f-305b-c7e405ae3e41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.791469] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831363, 'name': CloneVM_Task, 'duration_secs': 1.625016} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.791948] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Created linked-clone VM from snapshot [ 637.794107] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4167727-1792-402d-a32f-d517cc6e0cfa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.803935] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Uploading image 496f8fb7-0dbe-4b05-beb3-3f40fdbfb135 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 637.812085] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "934a0ca3-d879-4b23-90fe-2c190c201a88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.812903] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.837565] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 637.837565] env[62914]: value = "vm-941829" [ 637.837565] env[62914]: _type = "VirtualMachine" [ 637.837565] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 637.838057] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-abb4198d-8892-4f8d-8eea-e6b9f483d32d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.848665] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease: (returnval){ [ 637.848665] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d23765-fd75-6759-0941-20bec48ac541" [ 637.848665] env[62914]: _type = "HttpNfcLease" [ 637.848665] env[62914]: } obtained for exporting VM: (result){ [ 637.848665] env[62914]: value = "vm-941829" [ 637.848665] env[62914]: _type = "VirtualMachine" [ 637.848665] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 637.849238] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the lease: (returnval){ [ 637.849238] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d23765-fd75-6759-0941-20bec48ac541" [ 637.849238] env[62914]: _type = "HttpNfcLease" [ 637.849238] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 637.861036] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 637.861036] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d23765-fd75-6759-0941-20bec48ac541" [ 637.861036] env[62914]: _type = "HttpNfcLease" [ 637.861036] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 637.937196] env[62914]: DEBUG nova.scheduler.client.report [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.944503] env[62914]: DEBUG nova.network.neutron [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.974839] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831371, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.700911} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.977227] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ef521e82-38ab-4d62-b434-da7f7fa8c50f/ef521e82-38ab-4d62-b434-da7f7fa8c50f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 637.977227] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 637.977810] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad89542e-2858-40e7-a32a-0d44f9f171ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.992491] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 637.992491] env[62914]: value = "task-4831374" [ 637.992491] env[62914]: _type = "Task" [ 637.992491] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.005920] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831374, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.049933] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831372, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.122480] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 638.122850] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b646d17b-632e-4e45-b7e4-920ff68d872c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.131883] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 638.131883] env[62914]: value = "task-4831375" [ 638.131883] env[62914]: _type = "Task" [ 638.131883] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.143995] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831375, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.251155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-70a6d3e7-6928-47a7-9f7f-bd5dad64912f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.251155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-70a6d3e7-6928-47a7-9f7f-bd5dad64912f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.251155] env[62914]: DEBUG nova.network.neutron [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 638.285958] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52837d48-2df3-752f-305b-c7e405ae3e41, 'name': SearchDatastore_Task, 'duration_secs': 0.061856} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.286888] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0171e70-57ec-48ab-b136-1df947dc7aed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.297146] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 638.297146] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c5042b-3a93-382c-a6ce-31350f4cde1c" [ 638.297146] env[62914]: _type = "Task" [ 638.297146] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.310366] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c5042b-3a93-382c-a6ce-31350f4cde1c, 'name': SearchDatastore_Task, 'duration_secs': 0.011055} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.310680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.311570] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f/2f7bc586-af68-4d9d-81e2-8247371dfa7f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 638.311570] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ac7986b-78a1-4822-acbf-7c6242ff05ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.320446] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 638.320446] env[62914]: value = "task-4831376" [ 638.320446] env[62914]: _type = "Task" [ 638.320446] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.330792] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.359294] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 638.359294] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d23765-fd75-6759-0941-20bec48ac541" [ 638.359294] env[62914]: _type = "HttpNfcLease" [ 638.359294] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 638.359294] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 638.359294] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d23765-fd75-6759-0941-20bec48ac541" [ 638.359294] env[62914]: _type = "HttpNfcLease" [ 638.359294] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 638.360417] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06597c6d-96dd-4767-a466-077d3650eea7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.371556] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52035831-1d25-97f6-b015-9b602e3f5123/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 638.371770] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52035831-1d25-97f6-b015-9b602e3f5123/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 638.442743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.443592] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 638.449504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.323s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.450705] env[62914]: DEBUG nova.objects.instance [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lazy-loading 'resources' on Instance uuid 61e36e7b-aaa1-420e-bd43-f0184b56581b {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 638.451765] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.503950] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091463} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.503950] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 638.504757] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f74d8c6-980e-476b-9db1-302cafe52e39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.537202] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] ef521e82-38ab-4d62-b434-da7f7fa8c50f/ef521e82-38ab-4d62-b434-da7f7fa8c50f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 638.543864] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1999e26a-c565-4f2f-bf51-1ed8b89d9a32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.569759] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-349f8d1f-c663-4e04-879e-15ca1e411392 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.573873] env[62914]: DEBUG nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-changed-a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 638.575775] env[62914]: DEBUG nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Refreshing instance network info cache due to event network-changed-a2583c61-5b1a-4a33-8206-4f81fca1b131. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 638.575775] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Acquiring lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.575775] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Acquired lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.575775] env[62914]: DEBUG nova.network.neutron [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Refreshing network info cache for port a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 638.587698] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570842} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.589298] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ff2cff97-1671-4f97-8f69-532253169ff8/ff2cff97-1671-4f97-8f69-532253169ff8.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 638.589669] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 638.589961] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 638.589961] env[62914]: value = "task-4831377" [ 638.589961] env[62914]: _type = "Task" [ 638.589961] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.591294] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2800add-7900-415f-8d87-1df539ef5172 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.607023] env[62914]: DEBUG nova.compute.manager [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Received event network-vif-deleted-b0ce4bc2-c00c-4a33-98a7-b813f5b73a2b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 638.607023] env[62914]: DEBUG nova.compute.manager [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Received event network-vif-plugged-1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 638.607245] env[62914]: DEBUG oslo_concurrency.lockutils [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.607319] env[62914]: DEBUG oslo_concurrency.lockutils [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.607511] env[62914]: DEBUG oslo_concurrency.lockutils [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.607673] env[62914]: DEBUG nova.compute.manager [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] No waiting events found dispatching network-vif-plugged-1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 638.607863] env[62914]: WARNING nova.compute.manager [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Received unexpected event network-vif-plugged-1c22c510-e137-4ee3-8038-3b784a81e04f for instance with vm_state building and task_state spawning. 
[ 638.608041] env[62914]: DEBUG nova.compute.manager [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Received event network-changed-1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 638.608208] env[62914]: DEBUG nova.compute.manager [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Refreshing instance network info cache due to event network-changed-1c22c510-e137-4ee3-8038-3b784a81e04f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 638.608393] env[62914]: DEBUG oslo_concurrency.lockutils [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.608526] env[62914]: DEBUG oslo_concurrency.lockutils [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.608718] env[62914]: DEBUG nova.network.neutron [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Refreshing network info cache for port 1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 638.614991] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 638.614991] env[62914]: value = "task-4831378" [ 638.614991] env[62914]: _type = "Task" [ 638.614991] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.623132] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831377, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.633751] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831378, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.653566] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831375, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.838265] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831376, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.880603] env[62914]: DEBUG nova.network.neutron [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.963392] env[62914]: DEBUG nova.compute.utils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.966019] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 638.966019] env[62914]: DEBUG nova.network.neutron [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 639.000346] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b466f68-7a92-4ee3-9f2e-f294bf4106d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.034782] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d7b84f-13b6-4f61-a78a-c3c68ac76fe8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.047131] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 639.130455] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831377, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.148283] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831378, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.159509] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831375, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.254199] env[62914]: DEBUG nova.policy [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6dc3dcfc0de546dcb49d7d88d6432a1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a48cfbcf1a492cbbca942d7ddb570d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 639.355197] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831376, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590326} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.359068] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f/2f7bc586-af68-4d9d-81e2-8247371dfa7f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 639.359068] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 639.359932] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e26e86c-5eb1-43d2-a283-4550c8a10212 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.370232] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 639.370232] env[62914]: value = "task-4831379" [ 639.370232] env[62914]: _type = "Task" [ 639.370232] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.392049] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831379, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.469768] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 639.558163] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 639.560332] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cff8db5b-afd2-4254-8ce4-411eb6e80270 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.573839] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 639.573839] env[62914]: value = "task-4831380" [ 639.573839] env[62914]: _type = "Task" [ 639.573839] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.587837] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831380, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.589332] env[62914]: DEBUG nova.network.neutron [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Updating instance_info_cache with network_info: [{"id": "90cb4968-f2bd-4e77-9d1a-d66dcdf73599", "address": "fa:16:3e:7a:9b:f5", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90cb4968-f2", "ovs_interfaceid": "90cb4968-f2bd-4e77-9d1a-d66dcdf73599", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.611267] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831377, 'name': ReconfigVM_Task, 'duration_secs': 0.555037} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.611267] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Reconfigured VM instance instance-00000011 to attach disk [datastore2] ef521e82-38ab-4d62-b434-da7f7fa8c50f/ef521e82-38ab-4d62-b434-da7f7fa8c50f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.611768] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42e2212d-a050-458e-a237-9341aed10fa8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.624737] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 639.624737] env[62914]: value = "task-4831381" [ 639.624737] env[62914]: _type = "Task" [ 639.624737] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.637399] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1fd48c-8035-4fff-b051-bb3d674ca256 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.648934] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831378, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.544454} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.655534] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.656391] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831381, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.659161] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c0a576-c9a4-4978-8b43-ddef70adeb32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.662767] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cdf955-41fe-4be6-8b17-529b9953e7d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.672829] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831375, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.719237] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] ff2cff97-1671-4f97-8f69-532253169ff8/ff2cff97-1671-4f97-8f69-532253169ff8.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.720547] env[62914]: DEBUG nova.network.neutron [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updated VIF entry in instance network info cache for port a2583c61-5b1a-4a33-8206-4f81fca1b131. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 639.721348] env[62914]: DEBUG nova.network.neutron [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updating instance_info_cache with network_info: [{"id": "668a09a1-427d-4507-b7d0-45cab066cac8", "address": "fa:16:3e:30:a2:8d", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap668a09a1-42", "ovs_interfaceid": "668a09a1-427d-4507-b7d0-45cab066cac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1cdb314-5a29-443e-8562-bced871a8df1", "address": "fa:16:3e:46:47:35", "network": {"id": "f2647c62-0d1d-4453-a988-588ff12a0d92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020261419", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1cdb314-5a", "ovs_interfaceid": "a1cdb314-5a29-443e-8562-bced871a8df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "address": "fa:16:3e:bd:4b:0b", "network": {"id": "1f0e9d84-4ad9-477e-a396-a17ccdaaba76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1847945972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", 
"segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2583c61-5b", "ovs_interfaceid": "a2583c61-5b1a-4a33-8206-4f81fca1b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.726261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38821229-b03c-4636-b162-44c439560cbb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.736327] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9120f82-3fff-4ce3-ae00-ee1152a78caa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.752381] env[62914]: DEBUG nova.network.neutron [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updated VIF entry in instance network info cache for port 1c22c510-e137-4ee3-8038-3b784a81e04f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 639.753412] env[62914]: DEBUG nova.network.neutron [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.765148] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28a3770-381a-4299-953f-7861de3d25cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.768940] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 639.768940] env[62914]: value = "task-4831382" [ 639.768940] env[62914]: _type = "Task" [ 639.768940] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.782877] env[62914]: DEBUG nova.compute.provider_tree [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.793014] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831382, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.884417] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079994} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.884828] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.886029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033fe4ab-48fd-493b-9eef-cfe619cc42da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.911714] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f/2f7bc586-af68-4d9d-81e2-8247371dfa7f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.912818] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ebfebbf-202e-4d58-ae26-c0de2e1669c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.935296] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 639.935296] env[62914]: value = "task-4831383" [ 639.935296] env[62914]: _type = "Task" [ 639.935296] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.943467] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831383, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.085935] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831380, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.100578] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-70a6d3e7-6928-47a7-9f7f-bd5dad64912f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.100967] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance network_info: |[{"id": "90cb4968-f2bd-4e77-9d1a-d66dcdf73599", "address": "fa:16:3e:7a:9b:f5", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90cb4968-f2", "ovs_interfaceid": "90cb4968-f2bd-4e77-9d1a-d66dcdf73599", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 640.101330] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:9b:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90cb4968-f2bd-4e77-9d1a-d66dcdf73599', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.109719] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating folder: Project (14ea39ac6e2d400ca89bbffc20d764ef). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 640.110136] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9389e23e-6402-42c7-b133-167b54955766 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.123992] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created folder: Project (14ea39ac6e2d400ca89bbffc20d764ef) in parent group-v941773. [ 640.124517] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating folder: Instances. Parent ref: group-v941838. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 640.124640] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4911e8c7-d893-46eb-b4a2-9158464a2d30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.146548] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831381, 'name': Rename_Task, 'duration_secs': 0.223388} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.151970] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 640.153545] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created folder: Instances in parent group-v941838. [ 640.154249] env[62914]: DEBUG oslo.service.loopingcall [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.154602] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24a148f0-d6e0-46c7-b5a5-24d6e19edb5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.159808] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 640.160829] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e2e6e63-bc82-4321-b3df-52125e3766c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.184755] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831375, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.189289] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 640.189289] env[62914]: value = "task-4831386" [ 640.189289] env[62914]: _type = "Task" [ 640.189289] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.194287] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.194287] env[62914]: value = "task-4831387" [ 640.194287] env[62914]: _type = "Task" [ 640.194287] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.204524] env[62914]: DEBUG nova.network.neutron [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Successfully created port: e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.205769] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831386, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.215180] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831387, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.259509] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Releasing lock "refresh_cache-ef521e82-38ab-4d62-b434-da7f7fa8c50f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.259509] env[62914]: DEBUG nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Received event network-vif-plugged-510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 640.259509] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Acquiring lock "ff2cff97-1671-4f97-8f69-532253169ff8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.259509] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Lock "ff2cff97-1671-4f97-8f69-532253169ff8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.259509] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Lock "ff2cff97-1671-4f97-8f69-532253169ff8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.259509] env[62914]: DEBUG nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] No waiting events found dispatching network-vif-plugged-510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 640.259509] env[62914]: WARNING nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Received unexpected event network-vif-plugged-510995e6-4d3c-4b63-ae4e-d4c1f74254e8 for instance with vm_state building and task_state spawning. [ 640.259509] env[62914]: DEBUG nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Received event network-changed-510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 640.259509] env[62914]: DEBUG nova.compute.manager [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Refreshing instance network info cache due to event network-changed-510995e6-4d3c-4b63-ae4e-d4c1f74254e8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 640.259509] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Acquiring lock "refresh_cache-ff2cff97-1671-4f97-8f69-532253169ff8" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.259509] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Acquired lock "refresh_cache-ff2cff97-1671-4f97-8f69-532253169ff8" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.259509] env[62914]: DEBUG nova.network.neutron [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Refreshing network info cache for port 510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 640.261452] env[62914]: DEBUG oslo_concurrency.lockutils [req-fdb3afb6-af41-4449-a273-131972167262 req-79874c8c-5d3c-4d60-a993-a0211c424101 service nova] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.290435] env[62914]: DEBUG nova.scheduler.client.report [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.294058] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831382, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.445920] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831383, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.484442] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 640.519771] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 640.519771] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 640.519771] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.519771] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 640.519771] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.520021] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 640.520298] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 640.520579] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 640.520700] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 640.521013] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 640.521266] env[62914]: DEBUG nova.virt.hardware [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 640.522178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38d46ba-b8a7-4dd8-b4ea-a7db25b5c6c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.532881] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f39d5e-dcd2-45e8-bbbc-393088a6dcc7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.585959] env[62914]: DEBUG oslo_vmware.api [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831380, 'name': PowerOnVM_Task, 'duration_secs': 0.618993} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.586350] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 640.586573] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c82c2762-af44-424a-a842-70c09c99af66 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance 'aede8da7-8bf2-4963-b08b-6e06007614a5' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 640.665289] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831375, 'name': CloneVM_Task, 'duration_secs': 2.208733} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.668051] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Created linked-clone VM from snapshot [ 640.670213] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5908b6d1-0ac2-4809-9577-bff3278c8ea2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.687043] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Uploading image 0ea8d590-d90d-411e-b268-8ae777454c07 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 640.718382] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831386, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.719196] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831387, 'name': CreateVM_Task, 'duration_secs': 0.45502} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.725853] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 640.725853] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.725853] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.726380] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 640.726669] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d86b3c4c-32e1-4973-bb89-ddd04da77cf1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.738737] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 
tempest-ImagesOneServerTestJSON-1204860044-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 640.738737] env[62914]: value = "vm-941837" [ 640.738737] env[62914]: _type = "VirtualMachine" [ 640.738737] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 640.741332] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d122a559-8fce-4779-8f73-68103f7e0cc9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.743610] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 640.743610] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52469c1a-ff2f-212b-752d-edf3ef633891" [ 640.743610] env[62914]: _type = "Task" [ 640.743610] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.750210] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lease: (returnval){ [ 640.750210] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525e51d1-883c-ff49-9c83-b62bb90e69ec" [ 640.750210] env[62914]: _type = "HttpNfcLease" [ 640.750210] env[62914]: } obtained for exporting VM: (result){ [ 640.750210] env[62914]: value = "vm-941837" [ 640.750210] env[62914]: _type = "VirtualMachine" [ 640.750210] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 640.750669] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the lease: (returnval){ [ 640.750669] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525e51d1-883c-ff49-9c83-b62bb90e69ec" [ 640.750669] env[62914]: _type = "HttpNfcLease" [ 640.750669] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 640.757560] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52469c1a-ff2f-212b-752d-edf3ef633891, 'name': SearchDatastore_Task, 'duration_secs': 0.01259} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.762887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.763324] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 640.763693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.763919] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.764232] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 640.764684] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e54c187f-d95d-4094-89ea-eb27857c4b0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.775419] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 640.775419] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525e51d1-883c-ff49-9c83-b62bb90e69ec" [ 640.775419] env[62914]: _type = "HttpNfcLease" [ 640.775419] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 640.780263] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 640.780263] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525e51d1-883c-ff49-9c83-b62bb90e69ec" [ 640.780263] env[62914]: _type = "HttpNfcLease" [ 640.780263] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 640.780402] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1484e7c7-c996-47d5-a193-52423d00c06e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.785372] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 640.785591] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 640.788608] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dc83a69-f56f-4583-8375-12bce0f984d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.797028] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831382, 'name': ReconfigVM_Task, 'duration_secs': 0.536929} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.797351] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d787f5-f782-ed69-2a24-0c9e75de43cd/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 640.798282] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d787f5-f782-ed69-2a24-0c9e75de43cd/disk-0.vmdk for reading. 
{{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 640.799282] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Reconfigured VM instance instance-00000012 to attach disk [datastore2] ff2cff97-1671-4f97-8f69-532253169ff8/ff2cff97-1671-4f97-8f69-532253169ff8.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 640.800861] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.351s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.806418] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-458232b9-2b26-4cdb-a9f3-17589c06ea5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.808875] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.054s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.809419] env[62914]: DEBUG nova.objects.instance [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lazy-loading 'resources' on Instance uuid 9e39cfb8-e277-4798-92b0-b54f310ef2f4 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 640.811248] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 640.811248] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5223b0b3-0609-ca19-cb88-26f3b449c67b" [ 640.811248] env[62914]: _type = "Task" [ 640.811248] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.866500] env[62914]: INFO nova.scheduler.client.report [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Deleted allocations for instance 61e36e7b-aaa1-420e-bd43-f0184b56581b [ 640.883171] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 640.883171] env[62914]: value = "task-4831389" [ 640.883171] env[62914]: _type = "Task" [ 640.883171] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.890134] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5223b0b3-0609-ca19-cb88-26f3b449c67b, 'name': SearchDatastore_Task, 'duration_secs': 0.011006} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.893278] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a784313c-611c-48d3-89b1-f080d21cb7a1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.897584] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831389, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.904492] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 640.904492] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521e1692-8b96-fd5c-a14b-71d6d400e692" [ 640.904492] env[62914]: _type = "Task" [ 640.904492] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.919940] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521e1692-8b96-fd5c-a14b-71d6d400e692, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.934346] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9c0ca195-677a-4e84-92fe-89f3c741d092 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.946164] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831383, 'name': ReconfigVM_Task, 'duration_secs': 0.516362} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.947954] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f/2f7bc586-af68-4d9d-81e2-8247371dfa7f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 640.949888] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05b420d3-41dc-4057-a2da-454edb5b8d5a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.961165] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 640.961165] env[62914]: value = "task-4831390" [ 640.961165] env[62914]: _type = "Task" [ 640.961165] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.974263] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831390, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.203773] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831386, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.399905] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3227f128-576f-46c8-9aa2-c7b16e27e4e6 tempest-InstanceActionsTestJSON-94509073 tempest-InstanceActionsTestJSON-94509073-project-member] Lock "61e36e7b-aaa1-420e-bd43-f0184b56581b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.660s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.423590] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831389, 'name': Rename_Task, 'duration_secs': 0.163444} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.424264] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 641.425311] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b676929b-3071-4ca9-bab5-50a3a978ee25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.434180] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521e1692-8b96-fd5c-a14b-71d6d400e692, 'name': SearchDatastore_Task, 'duration_secs': 0.012708} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.439087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.439087] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 641.441893] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1e50816-5b2a-4ec1-9473-a36711c496f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.448220] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 641.448220] env[62914]: value = "task-4831391" [ 641.448220] env[62914]: _type = "Task" [ 641.448220] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.454800] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 641.454800] env[62914]: value = "task-4831392" [ 641.454800] env[62914]: _type = "Task" [ 641.454800] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.465808] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.478297] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831392, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.489384] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831390, 'name': Rename_Task, 'duration_secs': 0.196309} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.489768] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 641.490302] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ca0a0fd-7257-4116-99db-d7f7bfc9e4b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.500808] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 641.500808] env[62914]: value = "task-4831393" [ 641.500808] env[62914]: _type = "Task" [ 641.500808] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.530446] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831393, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.674579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.675608] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.717359] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831386, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.776190] env[62914]: DEBUG nova.network.neutron [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Updated VIF entry in instance network info cache for port 510995e6-4d3c-4b63-ae4e-d4c1f74254e8. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 641.777261] env[62914]: DEBUG nova.network.neutron [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Updating instance_info_cache with network_info: [{"id": "510995e6-4d3c-4b63-ae4e-d4c1f74254e8", "address": "fa:16:3e:05:02:e6", "network": {"id": "ee66e17c-2591-46b1-8585-3840b7e234ee", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1629786297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087c2e0c12a8423cace93462477bb622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap510995e6-4d", "ovs_interfaceid": "510995e6-4d3c-4b63-ae4e-d4c1f74254e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.821339] env[62914]: DEBUG nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Received event 
network-vif-plugged-90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 641.822497] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Acquiring lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.822497] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.822497] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.823084] env[62914]: DEBUG nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] No waiting events found dispatching network-vif-plugged-90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 641.823084] env[62914]: WARNING nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Received unexpected event network-vif-plugged-90cb4968-f2bd-4e77-9d1a-d66dcdf73599 for instance with vm_state building and task_state spawning. [ 641.823084] env[62914]: DEBUG nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Received event network-changed-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 641.823084] env[62914]: DEBUG nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Refreshing instance network info cache due to event network-changed-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 641.823424] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Acquiring lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.823424] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Acquired lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.823499] env[62914]: DEBUG nova.network.neutron [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Refreshing network info cache for port 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 641.968709] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831391, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.978975] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831392, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.018301] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831393, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.025754] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4a8456-b95c-4360-a250-3c0a08475fb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.040526] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c86cd7-39a2-4ee8-9d67-2f01c42516f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.865725] env[62914]: DEBUG oslo_concurrency.lockutils [req-923dc2ad-c91c-4c27-b368-9fdffb656807 req-3456a5b7-1815-4df0-9123-f029e9037a06 service nova] Releasing lock "refresh_cache-ff2cff97-1671-4f97-8f69-532253169ff8" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.881898] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1558edf-6ac1-4511-821f-09d7ec949b99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.890020] env[62914]: DEBUG nova.network.neutron [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Successfully updated port: e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.901322] env[62914]: DEBUG oslo_vmware.api [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831391, 'name': PowerOnVM_Task, 'duration_secs': 0.857122} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.901739] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59549} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.908365] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 642.908733] env[62914]: INFO nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Took 12.53 seconds to spawn the instance on the hypervisor. 
[ 642.909137] env[62914]: DEBUG nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 642.909434] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 642.909722] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.910120] env[62914]: DEBUG oslo_vmware.api [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831393, 'name': PowerOnVM_Task, 'duration_secs': 0.741396} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.910418] env[62914]: DEBUG oslo_vmware.api [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831386, 'name': PowerOnVM_Task, 'duration_secs': 1.551469} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.914666] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585c7667-7efa-4498-a976-d250a1d386ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.917520] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0306bd1a-9684-4cda-a090-fa60dbf4a9e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.920043] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 642.920522] env[62914]: INFO nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 9.90 seconds to spawn the instance on the hypervisor. 
[ 642.921178] env[62914]: DEBUG nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 642.921178] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 642.921422] env[62914]: INFO nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Took 20.88 seconds to spawn the instance on the hypervisor. [ 642.921422] env[62914]: DEBUG nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 642.924816] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f83da7-c2ec-46e4-a866-b232cc6f6ece {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.925725] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86a6ed9-507e-45ed-bc3c-dd6f2d6d0f55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.929523] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4764d1e-d8cd-405d-9e47-8d3a531f4d85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.960363] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 642.960363] env[62914]: value = "task-4831394" [ 642.960363] env[62914]: _type = "Task" [ 642.960363] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.965210] env[62914]: DEBUG nova.compute.provider_tree [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.976076] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831394, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.219921] env[62914]: DEBUG nova.network.neutron [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updated VIF entry in instance network info cache for port 1e7a9bbc-61fa-4ecf-8142-2568ff07c25c. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 643.219921] env[62914]: DEBUG nova.network.neutron [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updating instance_info_cache with network_info: [{"id": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "address": "fa:16:3e:ec:dc:0a", "network": {"id": "ba7c830d-c4a1-418f-a747-d997a634b13f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1412355570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33e55050ffe94a588a5db112563b5555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e7a9bbc-61", "ovs_interfaceid": "1e7a9bbc-61fa-4ecf-8142-2568ff07c25c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.393184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "refresh_cache-69a9cd15-7d6f-464d-b451-e193179088f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.393184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired lock "refresh_cache-69a9cd15-7d6f-464d-b451-e193179088f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.393184] env[62914]: DEBUG nova.network.neutron [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.497334] env[62914]: DEBUG nova.scheduler.client.report [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 643.511603] env[62914]: INFO nova.compute.manager [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Took 36.88 seconds to build instance. [ 643.526316] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171113} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.528105] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.529724] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51391153-a1d6-42f5-9155-a4ed6f6da4d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.562040] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.563333] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79a97205-7801-4635-8d4b-953886ebbb3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.584674] env[62914]: INFO nova.compute.manager [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Took 32.80 seconds to build instance. [ 643.586060] env[62914]: INFO nova.compute.manager [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 30.58 seconds to build instance. [ 643.590845] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 643.590845] env[62914]: value = "task-4831395" [ 643.590845] env[62914]: _type = "Task" [ 643.590845] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.602676] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.722675] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Releasing lock "refresh_cache-2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.723101] env[62914]: DEBUG nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Received event network-changed-90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 643.723357] env[62914]: DEBUG nova.compute.manager [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Refreshing instance network info cache due to event network-changed-90cb4968-f2bd-4e77-9d1a-d66dcdf73599. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 643.723647] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Acquiring lock "refresh_cache-70a6d3e7-6928-47a7-9f7f-bd5dad64912f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.723883] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Acquired lock "refresh_cache-70a6d3e7-6928-47a7-9f7f-bd5dad64912f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.724160] env[62914]: DEBUG nova.network.neutron [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Refreshing network info cache for port 90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 643.787212] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.787567] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 644.000375] env[62914]: DEBUG nova.network.neutron [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.020509] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.212s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.023445] env[62914]: DEBUG oslo_concurrency.lockutils [None req-79c97665-854b-484a-83ad-aba7db2e6067 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.414s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.024093] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.012s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.026727] env[62914]: INFO nova.compute.claims [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.079681] env[62914]: INFO nova.scheduler.client.report [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Deleted allocations for instance 9e39cfb8-e277-4798-92b0-b54f310ef2f4 [ 644.088331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-24c14aa3-9047-4c89-9a40-9714f14efb68 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "ff2cff97-1671-4f97-8f69-532253169ff8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.740s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.090044] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c111105e-e641-437e-9d10-7ce36871bf7a tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.885s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.104291] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831395, 'name': ReconfigVM_Task, 'duration_secs': 0.506272} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.109304] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 644.110329] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-433920b8-77d6-4395-80c4-67821600b725 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.124021] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 644.124021] env[62914]: value = "task-4831396" [ 644.124021] env[62914]: _type = "Task" [ 644.124021] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.148103] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831396, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.301802] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 644.301802] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 644.301802] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Rebuilding the list of instances to heal {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 644.332447] env[62914]: DEBUG nova.network.neutron [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Updating instance_info_cache with network_info: [{"id": "e321d104-ea08-4f03-8274-e8ef45dc8952", "address": "fa:16:3e:c5:19:2a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape321d104-ea", "ovs_interfaceid": "e321d104-ea08-4f03-8274-e8ef45dc8952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.540832] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 644.589682] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc0be165-60b7-49d8-aeb6-119024cbfaeb tempest-VolumesAdminNegativeTest-328242297 tempest-VolumesAdminNegativeTest-328242297-project-member] Lock "9e39cfb8-e277-4798-92b0-b54f310ef2f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.865s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.592917] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 644.597047] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 644.646348] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831396, 'name': Rename_Task, 'duration_secs': 0.224102} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.647114] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 644.647443] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c517f3f-214d-4dfc-9a06-3d5b3fe04b7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.658367] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 644.658367] env[62914]: value = "task-4831397" [ 644.658367] env[62914]: _type = "Task" [ 644.658367] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.674082] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831397, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.691415] env[62914]: DEBUG nova.network.neutron [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Updated VIF entry in instance network info cache for port 90cb4968-f2bd-4e77-9d1a-d66dcdf73599. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 644.692791] env[62914]: DEBUG nova.network.neutron [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Updating instance_info_cache with network_info: [{"id": "90cb4968-f2bd-4e77-9d1a-d66dcdf73599", "address": "fa:16:3e:7a:9b:f5", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90cb4968-f2", "ovs_interfaceid": "90cb4968-f2bd-4e77-9d1a-d66dcdf73599", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.811691] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 644.811883] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 644.811927] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Skipping network cache update for instance because it is Building. 
{{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 644.837855] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Releasing lock "refresh_cache-69a9cd15-7d6f-464d-b451-e193179088f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.837855] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Instance network_info: |[{"id": "e321d104-ea08-4f03-8274-e8ef45dc8952", "address": "fa:16:3e:c5:19:2a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape321d104-ea", "ovs_interfaceid": "e321d104-ea08-4f03-8274-e8ef45dc8952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 644.837855] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:19:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e321d104-ea08-4f03-8274-e8ef45dc8952', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.850450] env[62914]: DEBUG oslo.service.loopingcall [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.851312] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 644.851632] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfe57c4e-ecc6-4c15-8dcb-739a328cbef0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.878025] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.878025] env[62914]: value = "task-4831398" [ 644.878025] env[62914]: _type = "Task" [ 644.878025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.888464] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831398, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.904742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.904815] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.907021] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 644.907021] env[62914]: DEBUG nova.objects.instance [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lazy-loading 'info_cache' on Instance uuid 3eff61b1-b09c-4a04-821c-cefdc7be3f64 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 645.090721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.130125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.139494] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.175559] env[62914]: 
DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831397, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.196105] env[62914]: DEBUG oslo_concurrency.lockutils [req-7b95ee60-35d1-46ca-a5f2-70f1555ff6ea req-22b33856-c04e-48de-bf2c-08a8c30bbaeb service nova] Releasing lock "refresh_cache-70a6d3e7-6928-47a7-9f7f-bd5dad64912f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.390669] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831398, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.597436] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8976f5f-5a5c-4cf4-9278-745518f70f38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.606953] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2524615f-f0d7-45b0-9e37-ac0954ecd5f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.645083] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60b59e7-cf9d-4606-aea2-77961f988306 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.654362] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3214a6f3-5ea7-45c5-b3c5-d564636ef060 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.672473] env[62914]: DEBUG nova.compute.provider_tree [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.681951] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831397, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.890730] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831398, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.173491] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831397, 'name': PowerOnVM_Task} progress is 86%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.176556] env[62914]: DEBUG nova.scheduler.client.report [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.391032] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831398, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.676259] env[62914]: DEBUG oslo_vmware.api [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831397, 'name': PowerOnVM_Task, 'duration_secs': 1.640833} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.677029] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 646.677029] env[62914]: INFO nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Took 10.94 seconds to spawn the instance on the hypervisor. [ 646.677196] env[62914]: DEBUG nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 646.678010] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a206fb93-3cb7-4dc5-9aec-4b68edc5b6fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.684864] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.685378] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 646.688167] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.986s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.689620] env[62914]: INFO nova.compute.claims [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.738455] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Updating instance_info_cache with network_info: [{"id": "59265cbb-d823-43dc-a07d-d850de95a7d8", "address": "fa:16:3e:32:51:d4", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59265cbb-d8", "ovs_interfaceid": "59265cbb-d823-43dc-a07d-d850de95a7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.902974] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831398, 'name': CreateVM_Task, 'duration_secs': 1.630664} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.903203] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 646.903956] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.904176] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.904604] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 646.904924] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10db02d9-dd22-4288-bf30-723bc92b21d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.912517] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 646.912517] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb7a52-b4d3-2152-d378-7af33b3cc4b6" [ 646.912517] env[62914]: _type = "Task" [ 646.912517] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.922956] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb7a52-b4d3-2152-d378-7af33b3cc4b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.195757] env[62914]: DEBUG nova.compute.utils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 647.201021] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 647.201021] env[62914]: DEBUG nova.network.neutron [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 647.215713] env[62914]: INFO nova.compute.manager [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Took 32.79 seconds to build instance. [ 647.241716] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-3eff61b1-b09c-4a04-821c-cefdc7be3f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.242499] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 647.242499] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.242756] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.243479] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.243828] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.244777] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.245141] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.245371] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 647.245638] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 647.270899] env[62914]: DEBUG nova.policy [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d03194caf494166b39ec028e0b6dfa0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a4197794dad4013b8bca77e74af88f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 647.426748] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb7a52-b4d3-2152-d378-7af33b3cc4b6, 'name': SearchDatastore_Task, 'duration_secs': 0.026749} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.427118] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.427363] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 647.427632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.427783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.427987] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 647.428289] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55c25774-8d23-4969-85d7-7281ac7bdebe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.444106] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 647.444393] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 647.445347] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf6556b0-0579-4cc4-ac22-7cc9808b9b44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.453555] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 647.453555] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d98adc-c752-ccf2-507b-5c4e9712465d" [ 647.453555] env[62914]: _type = "Task" [ 647.453555] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.466855] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d98adc-c752-ccf2-507b-5c4e9712465d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.670061] env[62914]: DEBUG nova.network.neutron [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Successfully created port: 0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.701405] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 647.717078] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0dfd2ac8-53eb-46c5-b91a-52337ef496f6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 36.954s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.756955] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.972839] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d98adc-c752-ccf2-507b-5c4e9712465d, 'name': SearchDatastore_Task, 'duration_secs': 0.044299} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.977496] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f90dc7b-c3ab-408e-a081-5c95193cbd98 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.986787] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 647.986787] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52899dc1-1374-25d9-0ca3-ca355e63d870" [ 647.986787] env[62914]: _type = "Task" [ 647.986787] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.004163] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52899dc1-1374-25d9-0ca3-ca355e63d870, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.005992] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52035831-1d25-97f6-b015-9b602e3f5123/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 648.006888] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d30e15d-c077-496c-a857-ffc3a411a67c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.016154] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52035831-1d25-97f6-b015-9b602e3f5123/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 648.016154] env[62914]: ERROR oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52035831-1d25-97f6-b015-9b602e3f5123/disk-0.vmdk due to incomplete transfer. [ 648.016154] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-13107528-152d-441e-beb8-98f4234753c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.023169] env[62914]: DEBUG oslo_vmware.rw_handles [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52035831-1d25-97f6-b015-9b602e3f5123/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 648.023268] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Uploaded image 496f8fb7-0dbe-4b05-beb3-3f40fdbfb135 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 648.026269] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 648.026444] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-729139c6-c093-45b5-b56b-446d843f3089 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.035212] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 648.035212] env[62914]: value = "task-4831399" [ 648.035212] env[62914]: _type = "Task" [ 648.035212] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.050699] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831399, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.173013] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c56a37c-5ee9-48d4-9ae6-668f9472aab2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.184527] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10c0180-c9ee-4771-aecb-c501fc4c269b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.232021] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfa7cd1-7d29-4be3-b4b4-4f2c2b2f62c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.238432] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6953bd3-0f12-43ad-9b9d-ae65868a0c04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.253905] env[62914]: DEBUG nova.compute.provider_tree [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.500198] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52899dc1-1374-25d9-0ca3-ca355e63d870, 'name': SearchDatastore_Task, 'duration_secs': 0.03103} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.500431] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.501236] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 69a9cd15-7d6f-464d-b451-e193179088f7/69a9cd15-7d6f-464d-b451-e193179088f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 648.501608] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3dfef98d-e82e-46dd-882a-476b582435b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.513747] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 648.513747] env[62914]: value = "task-4831400" [ 648.513747] env[62914]: _type = "Task" [ 648.513747] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.530197] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.548902] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831399, 'name': Destroy_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.597825] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "aede8da7-8bf2-4963-b08b-6e06007614a5" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.598245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.598481] env[62914]: DEBUG nova.compute.manager [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Going to confirm migration 1 {{(pid=62914) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 648.733994] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 648.758902] env[62914]: DEBUG nova.scheduler.client.report [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 648.766694] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=<?>,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-25T11:20:45Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 648.766985] env[62914]: 
DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 648.767167] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.767359] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 648.767512] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.767669] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 648.768016] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 648.768358] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 648.768610] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 648.769252] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 648.770758] env[62914]: DEBUG nova.virt.hardware [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 648.774688] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d908886-371e-45eb-af66-5533804f65e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.788228] 
env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aceeec5-6c6d-4208-8436-07e0438d797c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.023047] env[62914]: DEBUG nova.compute.manager [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Received event network-vif-plugged-e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 649.023047] env[62914]: DEBUG oslo_concurrency.lockutils [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] Acquiring lock "69a9cd15-7d6f-464d-b451-e193179088f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.025728] env[62914]: DEBUG oslo_concurrency.lockutils [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] Lock "69a9cd15-7d6f-464d-b451-e193179088f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.025728] env[62914]: DEBUG oslo_concurrency.lockutils [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] Lock "69a9cd15-7d6f-464d-b451-e193179088f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.025728] env[62914]: DEBUG nova.compute.manager [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] No waiting events found dispatching network-vif-plugged-e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 649.025728] env[62914]: WARNING nova.compute.manager [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Received unexpected event network-vif-plugged-e321d104-ea08-4f03-8274-e8ef45dc8952 for instance with vm_state building and task_state spawning. [ 649.025728] env[62914]: DEBUG nova.compute.manager [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Received event network-changed-e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 649.025728] env[62914]: DEBUG nova.compute.manager [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Refreshing instance network info cache due to event network-changed-e321d104-ea08-4f03-8274-e8ef45dc8952. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 649.025728] env[62914]: DEBUG oslo_concurrency.lockutils [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] Acquiring lock "refresh_cache-69a9cd15-7d6f-464d-b451-e193179088f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.025728] env[62914]: DEBUG oslo_concurrency.lockutils [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] Acquired lock "refresh_cache-69a9cd15-7d6f-464d-b451-e193179088f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.025728] env[62914]: DEBUG nova.network.neutron [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Refreshing network info cache for port e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 649.031449] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831400, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.057711] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831399, 'name': Destroy_Task, 'duration_secs': 0.723885} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.058081] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Destroyed the VM [ 649.058798] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 649.058909] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-559a30e6-6e9b-43b7-872e-aa9aa2ea5537 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.068619] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 649.068619] env[62914]: value = "task-4831401" [ 649.068619] env[62914]: _type = "Task" [ 649.068619] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.082712] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831401, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.250213] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.250570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.250631] env[62914]: DEBUG nova.network.neutron [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.250828] env[62914]: DEBUG nova.objects.instance [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lazy-loading 'info_cache' on Instance uuid aede8da7-8bf2-4963-b08b-6e06007614a5 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 649.283820] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.284623] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 649.289886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.725s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.292658] env[62914]: INFO nova.compute.claims [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.528973] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.98723} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.528973] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 69a9cd15-7d6f-464d-b451-e193179088f7/69a9cd15-7d6f-464d-b451-e193179088f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 649.529256] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 649.529649] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6fe3e75-a946-4a5f-b074-6eb953cd8802 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.543078] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 649.543078] env[62914]: value = "task-4831402" [ 649.543078] env[62914]: _type = "Task" [ 649.543078] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.562038] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831402, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.588529] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831401, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.596223] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.596223] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.800889] env[62914]: DEBUG nova.compute.utils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 649.808009] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 649.808562] env[62914]: DEBUG nova.network.neutron [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 649.957567] env[62914]: DEBUG nova.policy [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ddee8b8fb0e4ffea8e02b2b9f7a9846', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df7ae349aea0487d88689eb09933eb1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 650.052443] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831402, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.158238} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.052783] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 650.053676] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624c2bbd-c147-4200-8174-20bc07a20046 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.084830] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 69a9cd15-7d6f-464d-b451-e193179088f7/69a9cd15-7d6f-464d-b451-e193179088f7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 650.089337] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d54978d-1b9b-4476-b828-1d3fec5c2e48 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.104628] env[62914]: DEBUG nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 650.114403] env[62914]: DEBUG oslo_vmware.api [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831401, 'name': RemoveSnapshot_Task, 'duration_secs': 0.72056} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.115769] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 650.116829] env[62914]: INFO nova.compute.manager [None req-cb8b5ef3-3979-4406-a37a-2bcc3db5154a tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Took 16.01 seconds to snapshot the instance on the hypervisor. [ 650.118592] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 650.118592] env[62914]: value = "task-4831403" [ 650.118592] env[62914]: _type = "Task" [ 650.118592] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.129368] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831403, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.254578] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "e1018767-71e4-49c9-bd4d-02eae39dc26b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.254973] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.255131] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "e1018767-71e4-49c9-bd4d-02eae39dc26b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.255745] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.255745] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.261240] env[62914]: INFO nova.compute.manager [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Terminating instance [ 650.266022] env[62914]: DEBUG nova.compute.manager [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 650.266163] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 650.268235] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb5c46f-18ee-4075-882c-a3a1aeddd63f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.281059] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 650.281301] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa587bc4-6ba1-4333-a629-510cbffcc6b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.289463] env[62914]: DEBUG oslo_vmware.api [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 650.289463] env[62914]: value = "task-4831404" [ 650.289463] env[62914]: _type = "Task" [ 650.289463] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.295529] env[62914]: DEBUG nova.network.neutron [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Updated VIF entry in instance network info cache for port e321d104-ea08-4f03-8274-e8ef45dc8952. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 650.295984] env[62914]: DEBUG nova.network.neutron [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Updating instance_info_cache with network_info: [{"id": "e321d104-ea08-4f03-8274-e8ef45dc8952", "address": "fa:16:3e:c5:19:2a", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape321d104-ea", "ovs_interfaceid": "e321d104-ea08-4f03-8274-e8ef45dc8952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.305842] env[62914]: DEBUG oslo_vmware.api [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.309150] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 650.434149] env[62914]: DEBUG nova.network.neutron [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Successfully updated port: 0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 650.642717] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.649676] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831403, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.704268] env[62914]: DEBUG nova.network.neutron [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Successfully created port: e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 650.799886] env[62914]: DEBUG oslo_concurrency.lockutils [req-a7cbfdd5-686d-40a3-b074-65fa675cd69f req-e1eafacc-4d9f-4a4c-ab51-9510b365517a service nova] Releasing lock "refresh_cache-69a9cd15-7d6f-464d-b451-e193179088f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.805744] env[62914]: DEBUG oslo_vmware.api [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831404, 'name': PowerOffVM_Task, 'duration_secs': 0.419278} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.809597] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 650.809597] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 650.810084] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccff7447-dc6c-46a5-8e4b-b9128a92270b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.900848] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 650.900848] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 650.900939] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Deleting the datastore file [datastore1] e1018767-71e4-49c9-bd4d-02eae39dc26b {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 650.904699] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-aa083a1a-8935-43f6-b2d3-3db7a091c187 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.915714] env[62914]: DEBUG oslo_vmware.api [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 650.915714] env[62914]: value = "task-4831406" [ 650.915714] env[62914]: _type = "Task" [ 650.915714] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.930501] env[62914]: DEBUG oslo_vmware.api [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831406, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.940632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "refresh_cache-1ddb6508-d8fb-4ead-bcb0-370c19bb287d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.940632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquired lock "refresh_cache-1ddb6508-d8fb-4ead-bcb0-370c19bb287d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.940823] env[62914]: DEBUG nova.network.neutron [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.949899] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba48377-0f3b-462f-9fb0-c78dd6ed07ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.960246] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c3a027-0710-46f3-ac42-1a3b5be831e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.004650] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85898373-5f4c-4e02-ab56-f6e546cbc8ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.015471] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db36d00-8a27-4fa9-81ff-2adba07f2c54 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.032040] env[62914]: DEBUG nova.compute.provider_tree [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 
{{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.043992] env[62914]: DEBUG nova.network.neutron [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.139292] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831403, 'name': ReconfigVM_Task, 'duration_secs': 0.695998} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.141464] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 69a9cd15-7d6f-464d-b451-e193179088f7/69a9cd15-7d6f-464d-b451-e193179088f7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 651.142691] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14e4f502-8b31-438d-a87d-de211192ba23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.153741] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 651.153741] env[62914]: value = "task-4831407" [ 651.153741] env[62914]: _type = "Task" [ 651.153741] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.168334] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831407, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.332251] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 651.387910] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 651.388789] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 651.388885] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.389803] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 651.390059] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.390260] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 651.391348] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 651.391348] 
env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 651.391348] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 651.391348] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 651.391348] env[62914]: DEBUG nova.virt.hardware [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.393871] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7047232-cec3-4681-b607-0428ac91e8af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.404132] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ac17eb-8043-4617-81be-482636a14535 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.431028] env[62914]: DEBUG oslo_vmware.api [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831406, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330115} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.431028] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.431028] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 651.431028] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 651.431441] env[62914]: INFO nova.compute.manager [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 651.431524] env[62914]: DEBUG oslo.service.loopingcall [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.432438] env[62914]: DEBUG nova.compute.manager [-] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 651.432438] env[62914]: DEBUG nova.network.neutron [-] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 651.535251] env[62914]: DEBUG nova.scheduler.client.report [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.545940] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.545940] env[62914]: DEBUG nova.objects.instance [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lazy-loading 'migration_context' on Instance uuid aede8da7-8bf2-4963-b08b-6e06007614a5 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 651.639763] env[62914]: DEBUG nova.network.neutron [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.669597] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831407, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.677052] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.677944] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.677944] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.678184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.678280] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.680991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "567f3d61-ed30-47d9-aebc-77c9392be506" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.680991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "567f3d61-ed30-47d9-aebc-77c9392be506" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.681511] 
env[62914]: INFO nova.compute.manager [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Terminating instance [ 651.684792] env[62914]: DEBUG nova.compute.manager [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 651.685034] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 651.686499] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668cd2ad-881a-4a0a-88a1-1baa08f16b12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.697222] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 651.697820] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3d44519-4b69-4eb7-b1d5-fa073e74d9dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.706985] env[62914]: DEBUG oslo_vmware.api [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 651.706985] env[62914]: value = "task-4831408" [ 651.706985] env[62914]: _type = "Task" [ 651.706985] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.716520] env[62914]: DEBUG oslo_vmware.api [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831408, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.744959] env[62914]: DEBUG nova.compute.manager [req-df91afcf-28ba-4092-8165-480130413c8b req-b51d9c44-fb00-4aa7-8b76-2a33b9c8acb7 service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Received event network-vif-plugged-0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 651.745266] env[62914]: DEBUG oslo_concurrency.lockutils [req-df91afcf-28ba-4092-8165-480130413c8b req-b51d9c44-fb00-4aa7-8b76-2a33b9c8acb7 service nova] Acquiring lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.745497] env[62914]: DEBUG oslo_concurrency.lockutils [req-df91afcf-28ba-4092-8165-480130413c8b req-b51d9c44-fb00-4aa7-8b76-2a33b9c8acb7 service nova] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.745674] env[62914]: DEBUG oslo_concurrency.lockutils [req-df91afcf-28ba-4092-8165-480130413c8b req-b51d9c44-fb00-4aa7-8b76-2a33b9c8acb7 service nova] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.745844] env[62914]: DEBUG nova.compute.manager [req-df91afcf-28ba-4092-8165-480130413c8b req-b51d9c44-fb00-4aa7-8b76-2a33b9c8acb7 service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] No waiting events found dispatching network-vif-plugged-0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 651.746100] env[62914]: WARNING nova.compute.manager [req-df91afcf-28ba-4092-8165-480130413c8b req-b51d9c44-fb00-4aa7-8b76-2a33b9c8acb7 service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Received unexpected event network-vif-plugged-0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce for instance with vm_state building and task_state spawning. [ 652.045032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.045032] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 652.048853] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.547s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.054418] env[62914]: DEBUG nova.objects.instance [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lazy-loading 'resources' on Instance uuid ea214cc0-0f7a-4aee-9906-8d47e660c8f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.060186] env[62914]: DEBUG nova.objects.base [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 652.063303] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8a0859-ea7b-4b42-bca5-e193d550a37d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.096196] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5781b04b-e568-46a1-96ef-c660f3a6538c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.105267] env[62914]: DEBUG oslo_vmware.api [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 652.105267] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bfa507-6421-0dd2-cfbb-deb2d7bc8bdc" [ 652.105267] env[62914]: _type = "Task" [ 652.105267] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.121963] env[62914]: DEBUG oslo_vmware.api [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bfa507-6421-0dd2-cfbb-deb2d7bc8bdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.170662] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831407, 'name': Rename_Task, 'duration_secs': 1.007595} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.171134] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 652.171498] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df9e981b-c1c3-42bf-8b83-a93c2841b05e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.180717] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 652.180717] env[62914]: value = "task-4831409" [ 652.180717] env[62914]: _type = "Task" [ 652.180717] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.190671] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831409, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.201088] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d787f5-f782-ed69-2a24-0c9e75de43cd/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 652.203231] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331e2980-0f0a-4bad-b529-4bea8af542d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.212493] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d787f5-f782-ed69-2a24-0c9e75de43cd/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 652.212701] env[62914]: ERROR oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d787f5-f782-ed69-2a24-0c9e75de43cd/disk-0.vmdk due to incomplete transfer. 
[ 652.213552] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-929476e0-81ee-4454-9ee9-fbeabe8fb909 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.219018] env[62914]: DEBUG oslo_vmware.api [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831408, 'name': PowerOffVM_Task, 'duration_secs': 0.483871} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.219849] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 652.219982] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 652.220262] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ecec3a5-42f4-42ec-b381-0f591f6cb412 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.226749] env[62914]: DEBUG oslo_vmware.rw_handles [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d787f5-f782-ed69-2a24-0c9e75de43cd/disk-0.vmdk. 
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 652.226993] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Uploaded image 0ea8d590-d90d-411e-b268-8ae777454c07 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 652.231018] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 652.231018] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0eec1fa7-ad06-46d7-b1e7-b8be85fb7073 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.237627] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 652.237627] env[62914]: value = "task-4831411" [ 652.237627] env[62914]: _type = "Task" [ 652.237627] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.248953] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831411, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.306423] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 652.306822] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 652.307131] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Deleting the datastore file [datastore1] 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.307480] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07dab747-7d64-4ab5-8e19-ebfdd00036c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.315103] env[62914]: DEBUG oslo_vmware.api [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for the task: (returnval){ [ 652.315103] env[62914]: value = "task-4831412" [ 652.315103] env[62914]: _type = "Task" [ 652.315103] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.324274] env[62914]: DEBUG oslo_vmware.api [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831412, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.350558] env[62914]: DEBUG nova.network.neutron [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Updating instance_info_cache with network_info: [{"id": "0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce", "address": "fa:16:3e:54:86:37", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bb7c6b3-8c", "ovs_interfaceid": "0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.570760] env[62914]: DEBUG nova.compute.utils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 652.577058] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 652.577465] env[62914]: DEBUG nova.network.neutron [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 652.619369] env[62914]: DEBUG oslo_vmware.api [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bfa507-6421-0dd2-cfbb-deb2d7bc8bdc, 'name': SearchDatastore_Task, 'duration_secs': 0.020498} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.623666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.696689] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831409, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.749523] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831411, 'name': Destroy_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.788277] env[62914]: DEBUG nova.policy [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85de0d299e7745cb8c6805b46a8a3389', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c4d14e64cb240d9816b0677dc020110', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 652.829805] env[62914]: DEBUG oslo_vmware.api [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Task: {'id': task-4831412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33644} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.830664] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.830664] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 652.830821] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 652.831074] env[62914]: INFO nova.compute.manager [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 652.831392] env[62914]: DEBUG oslo.service.loopingcall [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.831638] env[62914]: DEBUG nova.compute.manager [-] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 652.831898] env[62914]: DEBUG nova.network.neutron [-] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 652.859382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Releasing lock "refresh_cache-1ddb6508-d8fb-4ead-bcb0-370c19bb287d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.859808] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Instance network_info: |[{"id": "0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce", "address": "fa:16:3e:54:86:37", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bb7c6b3-8c", "ovs_interfaceid": "0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 652.860598] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:86:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.869892] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Creating folder: Project (5a4197794dad4013b8bca77e74af88f2). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.870612] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4318fc9-5021-4978-af59-f499d0a78268 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.885183] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Created folder: Project (5a4197794dad4013b8bca77e74af88f2) in parent group-v941773. [ 652.885480] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Creating folder: Instances. Parent ref: group-v941842. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.885771] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7724d02-66ac-48c1-8420-40231b4f329e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.900450] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Created folder: Instances in parent group-v941842. [ 652.900724] env[62914]: DEBUG oslo.service.loopingcall [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.900932] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 652.901210] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fda112a2-06fa-4c0d-b624-04e61269d613 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.934463] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.934463] env[62914]: value = "task-4831415" [ 652.934463] env[62914]: _type = "Task" [ 652.934463] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.946904] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831415, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.081497] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 653.177287] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b39110-bf07-408f-8a41-c2d36c6ef7de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.197454] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831409, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.202749] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49634f3-b81c-4e5d-a21d-1476cbfb2ab2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.248966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb099e0-8a6f-4d9a-8352-8cfb6d571da4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.265296] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831411, 'name': Destroy_Task, 'duration_secs': 0.526858} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.267446] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61fd62e-b87b-4169-9a9c-5f3f1a8dab62 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.274016] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Destroyed the VM [ 653.274525] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 653.276532] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-40069226-f429-45ea-bd30-133335d0a189 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.298621] env[62914]: DEBUG nova.compute.provider_tree [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.304427] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 653.304427] env[62914]: value = "task-4831416" [ 653.304427] 
env[62914]: _type = "Task" [ 653.304427] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.314533] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831416, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.331314] env[62914]: DEBUG nova.compute.manager [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Received event network-changed-0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 653.331470] env[62914]: DEBUG nova.compute.manager [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Refreshing instance network info cache due to event network-changed-0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 653.331691] env[62914]: DEBUG oslo_concurrency.lockutils [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] Acquiring lock "refresh_cache-1ddb6508-d8fb-4ead-bcb0-370c19bb287d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.331888] env[62914]: DEBUG oslo_concurrency.lockutils [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] Acquired lock "refresh_cache-1ddb6508-d8fb-4ead-bcb0-370c19bb287d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.332075] env[62914]: DEBUG nova.network.neutron [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Refreshing network info cache for port 0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 653.446567] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831415, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.628689] env[62914]: DEBUG nova.network.neutron [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Successfully updated port: e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.697596] env[62914]: DEBUG oslo_vmware.api [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831409, 'name': PowerOnVM_Task, 'duration_secs': 1.047824} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.697885] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 653.698822] env[62914]: INFO nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Took 13.21 seconds to spawn the instance on the hypervisor. [ 653.698822] env[62914]: DEBUG nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 653.699542] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee848b4d-782f-47e0-aeed-c16d781d0ac9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.783249] env[62914]: INFO nova.compute.manager [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Rebuilding instance [ 653.803465] env[62914]: DEBUG nova.scheduler.client.report [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 653.823851] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831416, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.834129] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "54185b06-7ccb-4740-a6ee-213bbfa6365b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.834319] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.854165] env[62914]: DEBUG nova.compute.manager [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 653.855322] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d576d68-b2f6-472e-a990-fb55f1acd62a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.956484] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831415, 'name': CreateVM_Task, 'duration_secs': 0.621609} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.956563] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 653.957610] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.958971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.958971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.959558] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23ee79b4-5a7f-4fee-92ff-ca68bbd95cff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.965688] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 653.965688] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c5a17b-9530-94cd-6c24-ac41286524dd" [ 653.965688] env[62914]: _type = "Task" [ 653.965688] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.981368] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c5a17b-9530-94cd-6c24-ac41286524dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.101295] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 654.132780] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.132780] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.132969] env[62914]: DEBUG nova.network.neutron [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 654.144342] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.144620] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.145017] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.145903] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 654.146278] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.146607] env[62914]: DEBUG 
nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.147216] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.147543] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.148195] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.148504] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.149353] env[62914]: DEBUG nova.virt.hardware [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.153148] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8b567b-a776-4f61-992f-dce233de11fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.169841] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4711b9-2756-48e4-85ae-21c491095b68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.222418] env[62914]: INFO nova.compute.manager [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Took 36.36 seconds to build instance. 
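The nova.virt.hardware lines above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies") enumerate the (sockets, cores, threads) splits of the flavor's vCPU count that fit under the limits, which are 65536 each here because neither flavor nor image constrains them. The function below is a simplified sketch of that enumeration, not Nova's actual algorithm; the Topology tuple and the argument names are illustrative.

from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every exact factorization of vcpus into sockets*cores*threads
    that respects the upper limits."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

print(possible_topologies(1))        # [Topology(sockets=1, cores=1, threads=1)]
print(len(possible_topologies(4)))   # 6 candidate splits for a 4-vCPU flavor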
[ 654.224357] env[62914]: DEBUG nova.network.neutron [-] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.318901] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.322273] env[62914]: DEBUG oslo_vmware.api [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831416, 'name': RemoveSnapshot_Task, 'duration_secs': 0.887356} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.322948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.004s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.328756] env[62914]: INFO nova.compute.claims [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.329132] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 654.329175] env[62914]: INFO nova.compute.manager [None req-08bf3c7b-2fe3-41dd-b1f7-b24cbb827a17 tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Took 18.31 seconds to snapshot the instance on the hypervisor. 
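The scheduler.client.report entry above carries the provider's inventory (total, reserved, min/max_unit, step_size and allocation_ratio per resource class), and the claim that follows succeeds against it. Placement treats roughly (total - reserved) * allocation_ratio as the schedulable capacity, with max_unit capping what a single allocation may request; the snippet below only reproduces that arithmetic for the figures in the log, as an illustration rather than placement's actual code.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 95},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per allocation")
# VCPU: 192 schedulable, MEMORY_MB: 196078 schedulable, DISK_GB: 200 schedulable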
[ 654.341050] env[62914]: DEBUG nova.network.neutron [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Successfully created port: eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.347841] env[62914]: DEBUG nova.compute.manager [req-aff440ce-8d5c-440e-ab69-a43609fd98ed req-e5546e97-7861-4d8e-b5a5-a06dddf980b0 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Received event network-vif-plugged-e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 654.348167] env[62914]: DEBUG oslo_concurrency.lockutils [req-aff440ce-8d5c-440e-ab69-a43609fd98ed req-e5546e97-7861-4d8e-b5a5-a06dddf980b0 service nova] Acquiring lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.348618] env[62914]: DEBUG oslo_concurrency.lockutils [req-aff440ce-8d5c-440e-ab69-a43609fd98ed req-e5546e97-7861-4d8e-b5a5-a06dddf980b0 service nova] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.348803] env[62914]: DEBUG oslo_concurrency.lockutils [req-aff440ce-8d5c-440e-ab69-a43609fd98ed req-e5546e97-7861-4d8e-b5a5-a06dddf980b0 service nova] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.349672] env[62914]: DEBUG nova.compute.manager [req-aff440ce-8d5c-440e-ab69-a43609fd98ed req-e5546e97-7861-4d8e-b5a5-a06dddf980b0 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] No waiting events found dispatching network-vif-plugged-e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 654.350073] env[62914]: WARNING nova.compute.manager [req-aff440ce-8d5c-440e-ab69-a43609fd98ed req-e5546e97-7861-4d8e-b5a5-a06dddf980b0 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Received unexpected event network-vif-plugged-e42f6371-f854-4e39-ae20-c78d59217dbb for instance with vm_state building and task_state spawning. 
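The req-aff440ce entries above show the external-event plumbing: Neutron reports network-vif-plugged, the per-instance "-events" lock is taken, and because nothing is waiting yet the event is logged as unexpected ("No waiting events found dispatching ..."). The class below is a rough stand-in for that registry, built on threading.Event; the class name, method names, and the shortened UUID in the demo are illustrative, not Nova's real interface.

import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()       # stands in for the "-events" lock above
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event and return something to wait on."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Deliver an event: wake the waiter if one exists, else it is unexpected."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"no waiting events found, {event_name} is unexpected")
        else:
            ev.set()

events = InstanceEvents()
waiter = events.prepare("6bdcd778", "network-vif-plugged")
events.pop("6bdcd778", "network-vif-plugged")   # wakes the waiter
assert waiter.is_set()
events.pop("6bdcd778", "network-vif-plugged")   # second delivery: unexpected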
[ 654.367939] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 654.368660] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9310238f-693e-4c59-885b-ac017938e73c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.377876] env[62914]: INFO nova.scheduler.client.report [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Deleted allocations for instance ea214cc0-0f7a-4aee-9906-8d47e660c8f7 [ 654.384509] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 654.384509] env[62914]: value = "task-4831417" [ 654.384509] env[62914]: _type = "Task" [ 654.384509] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.411071] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.484697] env[62914]: DEBUG nova.network.neutron [-] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.484697] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c5a17b-9530-94cd-6c24-ac41286524dd, 'name': SearchDatastore_Task, 'duration_secs': 0.024966} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.484697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.484697] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.484697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.484697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.484697] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.484697] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7765695-0979-436a-a0e6-d22a89b8f4cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.503450] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.503450] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 654.605941] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8d62124-397e-4ed4-ad32-113f8d6baf92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.605941] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 654.605941] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b8c56c-bbd7-5ac9-f8cd-fe13549535ab" [ 654.605941] env[62914]: _type = "Task" [ 654.605941] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.605941] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.605941] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.605941] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.605941] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.605941] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.605941] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b8c56c-bbd7-5ac9-f8cd-fe13549535ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.605941] env[62914]: INFO nova.compute.manager [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Terminating instance [ 654.605941] env[62914]: DEBUG nova.compute.manager [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 654.605941] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 654.605941] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb2f37b-1857-4d1d-bc22-5018ddaf7b72 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.605941] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 654.605941] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0d8edb8-738d-4822-ab15-9ba328fc417b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.605941] env[62914]: DEBUG oslo_vmware.api [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 654.605941] env[62914]: value = "task-4831418" [ 654.605941] env[62914]: _type = "Task" [ 654.605941] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.607348] env[62914]: DEBUG oslo_vmware.api [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831418, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.629622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.629622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.629622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.629622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.630238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.642803] env[62914]: INFO nova.compute.manager [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Terminating instance [ 654.653080] env[62914]: DEBUG nova.compute.manager [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 654.653080] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 654.653080] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76b7833-93e0-44a2-8b57-b06a98f2d8d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.672599] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 654.672599] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f3ad4c8-7292-49a0-87b8-50c329960551 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.726916] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1429961-d688-4e3c-bf91-96e7a3a5136d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "69a9cd15-7d6f-464d-b451-e193179088f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.465s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.729825] env[62914]: INFO nova.compute.manager [-] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Took 3.30 seconds to deallocate network for instance. 
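The destroy sequences above all follow the same client-side pattern: a vSphere call (PowerOffVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task) returns a task reference, and oslo.vmware's wait_for_task/_poll_task then logs "progress is N%" until the task completes. A minimal sketch of that polling loop follows, assuming a poll_fn callable as a hypothetical stand-in for the PropertyCollector query the real wait_for_task issues; it illustrates the pattern only and is not Nova or oslo.vmware source.

# Illustrative sketch only: mirrors the "Waiting for the task ... to complete"
# and "_poll_task ... progress is N%" records above. poll_fn is a hypothetical
# stand-in for the PropertyCollector query the real implementation performs.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in the 'error' state."""


def wait_for_task(poll_fn, poll_interval=0.5):
    """Poll until a vCenter-style task finishes and return its result.

    poll_fn() must return a (state, progress, result, error) tuple, where
    state is one of 'queued', 'running', 'success' or 'error'.
    """
    while True:
        state, progress, result, error = poll_fn()
        if state in ('queued', 'running'):
            # Corresponds to the DEBUG "Task: {...} progress is N%" records.
            print("Task progress is %d%%" % (progress or 0))
        elif state == 'success':
            # Corresponds to "... completed successfully."
            return result
        else:
            raise TaskFailed(error)
        time.sleep(poll_interval)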
[ 654.756687] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 654.757062] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 654.757320] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleting the datastore file [datastore2] 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.757652] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d298094-a1fa-4352-b0a5-035f1402f23f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.767570] env[62914]: DEBUG oslo_vmware.api [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 654.767570] env[62914]: value = "task-4831420" [ 654.767570] env[62914]: _type = "Task" [ 654.767570] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.781123] env[62914]: DEBUG oslo_vmware.api [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.782693] env[62914]: DEBUG nova.network.neutron [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.897280] env[62914]: DEBUG oslo_concurrency.lockutils [None req-96e95ec9-795c-434d-b3f0-7cf30659992c tempest-ServerExternalEventsTest-1328511746 tempest-ServerExternalEventsTest-1328511746-project-member] Lock "ea214cc0-0f7a-4aee-9906-8d47e660c8f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.630s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.902252] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831417, 'name': PowerOffVM_Task, 'duration_secs': 0.255854} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.902252] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 654.902252] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 654.903557] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7968ad2a-a562-48b9-b2cf-e599af46d4e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.910672] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 654.911052] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6a37e3c-927c-4545-b141-4fce8ecb633a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.939154] env[62914]: INFO nova.compute.manager [-] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Took 2.11 seconds to deallocate network for instance. [ 654.949628] env[62914]: DEBUG nova.network.neutron [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Updated VIF entry in instance network info cache for port 0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 654.950046] env[62914]: DEBUG nova.network.neutron [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Updating instance_info_cache with network_info: [{"id": "0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce", "address": "fa:16:3e:54:86:37", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bb7c6b3-8c", "ovs_interfaceid": "0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.981333] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 654.981333] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 654.981333] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore2] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.981333] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41207287-6088-44c2-8428-9cdf10c6045a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.991167] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 654.991167] env[62914]: value = "task-4831422" [ 654.991167] env[62914]: _type = "Task" [ 654.991167] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.001615] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831422, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.033484] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b8c56c-bbd7-5ac9-f8cd-fe13549535ab, 'name': SearchDatastore_Task, 'duration_secs': 0.024673} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.036217] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b22f359-83d7-4f74-ae09-3fb862d12e91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.045021] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 655.045021] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5240250a-4641-14f9-e0b3-1e2fcbebbd92" [ 655.045021] env[62914]: _type = "Task" [ 655.045021] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.054472] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5240250a-4641-14f9-e0b3-1e2fcbebbd92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.082977] env[62914]: DEBUG oslo_vmware.api [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831418, 'name': PowerOffVM_Task, 'duration_secs': 0.207483} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.083326] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 655.083506] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 655.084160] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4768a83-2e13-43d5-9b92-94d6b57560fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.144539] env[62914]: DEBUG nova.network.neutron [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Updating instance_info_cache with network_info: [{"id": "e42f6371-f854-4e39-ae20-c78d59217dbb", "address": "fa:16:3e:50:ed:dd", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape42f6371-f8", "ovs_interfaceid": "e42f6371-f854-4e39-ae20-c78d59217dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.226607] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 655.226749] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 655.226985] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Deleting the 
datastore file [datastore2] ef521e82-38ab-4d62-b434-da7f7fa8c50f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 655.227395] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f225c0f-87d6-4098-b3d5-a4bb892f550a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.230778] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 655.238956] env[62914]: DEBUG oslo_vmware.api [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 655.238956] env[62914]: value = "task-4831424" [ 655.238956] env[62914]: _type = "Task" [ 655.238956] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.245430] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.255318] env[62914]: DEBUG oslo_vmware.api [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831424, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.282510] env[62914]: DEBUG oslo_vmware.api [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.461077} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.282625] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 655.282813] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 655.282994] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 655.283202] env[62914]: INFO nova.compute.manager [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Took 0.63 seconds to destroy the instance on the hypervisor. [ 655.283525] env[62914]: DEBUG oslo.service.loopingcall [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.284064] env[62914]: DEBUG nova.compute.manager [-] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 655.284064] env[62914]: DEBUG nova.network.neutron [-] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 655.452982] env[62914]: DEBUG oslo_concurrency.lockutils [req-26e9a785-cb4d-49c8-9b0c-d4d373fcd0cb req-23348ba8-249e-4a80-b8e9-7b5b73d0522d service nova] Releasing lock "refresh_cache-1ddb6508-d8fb-4ead-bcb0-370c19bb287d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.454477] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.507281] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377174} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.507569] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 655.508797] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 655.508797] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 655.560846] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5240250a-4641-14f9-e0b3-1e2fcbebbd92, 'name': SearchDatastore_Task, 'duration_secs': 0.054959} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.561499] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.561966] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 1ddb6508-d8fb-4ead-bcb0-370c19bb287d/1ddb6508-d8fb-4ead-bcb0-370c19bb287d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 655.562452] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52c8b64e-c412-4801-8379-b5ee963898ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.572416] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 655.572416] env[62914]: value = "task-4831425" [ 655.572416] env[62914]: _type = "Task" [ 655.572416] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.585901] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.647616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.647965] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Instance network_info: |[{"id": "e42f6371-f854-4e39-ae20-c78d59217dbb", "address": "fa:16:3e:50:ed:dd", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape42f6371-f8", "ovs_interfaceid": "e42f6371-f854-4e39-ae20-c78d59217dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 655.648420] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:ed:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e42f6371-f854-4e39-ae20-c78d59217dbb', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.656383] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Creating folder: Project (df7ae349aea0487d88689eb09933eb1c). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 655.659279] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0cd17043-3252-452b-b6b3-8814f0123365 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.673149] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Created folder: Project (df7ae349aea0487d88689eb09933eb1c) in parent group-v941773. [ 655.673149] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Creating folder: Instances. Parent ref: group-v941845. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 655.676081] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5deb4e36-74f6-40a7-b975-f5473fd01346 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.691318] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Created folder: Instances in parent group-v941845. [ 655.691318] env[62914]: DEBUG oslo.service.loopingcall [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.691465] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 655.691576] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0396cb4-9c3d-45cb-bbe2-2df9bf369a88 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.721425] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.721425] env[62914]: value = "task-4831428" [ 655.721425] env[62914]: _type = "Task" [ 655.721425] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.737815] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831428, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.762317] env[62914]: DEBUG oslo_vmware.api [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326112} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.762704] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 655.763059] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 655.763184] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 655.763276] env[62914]: INFO nova.compute.manager [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Took 1.22 seconds to destroy the instance on the hypervisor. [ 655.763536] env[62914]: DEBUG oslo.service.loopingcall [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.770771] env[62914]: DEBUG nova.compute.manager [-] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 655.770886] env[62914]: DEBUG nova.network.neutron [-] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 655.779668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.907727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "fed831e0-4518-4025-89b1-7f6b644e013d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.908394] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "fed831e0-4518-4025-89b1-7f6b644e013d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.980424] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa34a25a-c417-4d8f-a84b-9b2520063d94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.990499] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b3d910-be76-4ad6-ae7c-1ff7beb22bb4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.029623] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4522cd52-4ebd-443d-ad29-2403bd354a5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.046389] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa34a7c-14a3-4124-911e-24011939975a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.065266] env[62914]: DEBUG nova.compute.provider_tree [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.088340] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831425, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.239226] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831428, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.295841] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.297191] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.569185] env[62914]: DEBUG nova.scheduler.client.report [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 656.587950] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831425, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.590319] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 656.591037] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 656.591037] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.591037] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 656.591216] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.591696] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 656.591969] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 656.592151] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 656.592330] env[62914]: 
DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 656.592739] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 656.592739] env[62914]: DEBUG nova.virt.hardware [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 656.593693] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e57ee7-e552-4d66-911f-91fa23c13e0f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.607139] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a959a5-35d9-4036-9d71-78128590e611 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.628813] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:9b:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90cb4968-f2bd-4e77-9d1a-d66dcdf73599', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.637143] env[62914]: DEBUG oslo.service.loopingcall [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.637143] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 656.637802] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb02a1f5-6bc8-4bf9-b8b2-6bb96bb67569 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.659145] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.659145] env[62914]: value = "task-4831429" [ 656.659145] env[62914]: _type = "Task" [ 656.659145] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.669372] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831429, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.704441] env[62914]: DEBUG nova.network.neutron [-] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.738328] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831428, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.801763] env[62914]: DEBUG nova.compute.utils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.077801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.078313] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 657.088691] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.686s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.091075] env[62914]: INFO nova.compute.claims [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.103849] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831425, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.071405} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.104174] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 1ddb6508-d8fb-4ead-bcb0-370c19bb287d/1ddb6508-d8fb-4ead-bcb0-370c19bb287d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 657.104398] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 657.104661] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-856dc1fa-71e7-412f-a193-720b3a1d13ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.114052] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 657.114052] env[62914]: value = "task-4831430" [ 657.114052] env[62914]: _type = "Task" [ 657.114052] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.139202] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831430, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.175493] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831429, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.212901] env[62914]: INFO nova.compute.manager [-] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Took 1.93 seconds to deallocate network for instance. [ 657.235223] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831428, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.305723] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.585531] env[62914]: DEBUG nova.compute.utils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.590508] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 657.590508] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 657.630044] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123992} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.630765] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 657.632412] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a798af2b-5b67-4744-877b-041e722ba24b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.663135] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 1ddb6508-d8fb-4ead-bcb0-370c19bb287d/1ddb6508-d8fb-4ead-bcb0-370c19bb287d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.663135] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9224c962-7561-436f-a6f0-5bf1df251295 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.695300] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831429, 'name': CreateVM_Task, 'duration_secs': 0.642645} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.696963] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 657.697544] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 657.697544] env[62914]: value = "task-4831431" [ 657.697544] env[62914]: _type = "Task" [ 657.697544] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.698405] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.698702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.699542] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 657.700186] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf0c43d6-c9a5-4914-be3f-67194ca83675 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.712117] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 657.712117] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521cfc51-12c7-3ed3-3474-09d128dcb0cd" [ 657.712117] env[62914]: _type = "Task" [ 657.712117] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.716097] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.727112] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521cfc51-12c7-3ed3-3474-09d128dcb0cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.732158] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.738564] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831428, 'name': CreateVM_Task, 'duration_secs': 1.643669} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.741152] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 657.741778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.768601] env[62914]: DEBUG nova.policy [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b603a1ee50c34178992cdb26f88c3863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dd71bf518024821931bb9add9996d4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.092056] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 658.214109] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.227808] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521cfc51-12c7-3ed3-3474-09d128dcb0cd, 'name': SearchDatastore_Task, 'duration_secs': 0.029968} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.230668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.231401] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.231401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.231401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.231585] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.233150] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.233150] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 658.233150] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-648ab737-bb39-4a7e-bd15-d6026739a916 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.234712] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c62228a-a284-4512-b53c-93787d876a4b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.245711] env[62914]: DEBUG oslo_vmware.api [None 
req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 658.245711] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527643e5-ae1e-2452-1017-ffbac1f6263d" [ 658.245711] env[62914]: _type = "Task" [ 658.245711] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.255149] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.255443] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 658.257428] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a65cd1f9-4a75-42fb-bc6a-bb11f9043b6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.263884] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527643e5-ae1e-2452-1017-ffbac1f6263d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.271057] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 658.271057] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527d3bbf-fb16-b332-9fd1-b7238a4a3b19" [ 658.271057] env[62914]: _type = "Task" [ 658.271057] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.280214] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527d3bbf-fb16-b332-9fd1-b7238a4a3b19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.354135] env[62914]: DEBUG nova.network.neutron [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Successfully updated port: eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.435081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.435449] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.436322] env[62914]: INFO nova.compute.manager [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Attaching volume 1ee7830b-c356-46cb-bbb5-755b85e54338 to /dev/sdb [ 658.521290] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ed066b-5ec7-446c-9ed9-a65941150ffc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.534788] env[62914]: DEBUG nova.compute.manager [req-d90724e4-1454-42df-9a44-375b8d608462 req-25e8610f-3ffc-478f-b9e9-f63c8d728798 service nova] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Received event network-vif-deleted-cf87f855-3a4c-43d5-a06f-db1eb5eec958 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 658.545646] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101b10cf-9e31-47a1-bca8-4bd9867de322 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.561654] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Successfully created port: 3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.580428] env[62914]: DEBUG nova.virt.block_device [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updating existing volume attachment record: c92001e7-dd0a-4d1a-a51b-e1c7b0bed575 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 658.727026] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c 
tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831431, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.754248] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "12aa02f0-a232-427a-80ba-1faa12c4d43a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.754797] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.770986] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527643e5-ae1e-2452-1017-ffbac1f6263d, 'name': SearchDatastore_Task, 'duration_secs': 0.026861} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.776697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.776697] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.776697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.787128] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527d3bbf-fb16-b332-9fd1-b7238a4a3b19, 'name': SearchDatastore_Task, 'duration_secs': 0.024593} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.787902] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07c4166b-665f-468f-b4f6-e0fb6580b7c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.796774] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 658.796774] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bfaf58-44ff-0ce9-4190-8ba8c2cab06c" [ 658.796774] env[62914]: _type = "Task" [ 658.796774] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.811676] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bfaf58-44ff-0ce9-4190-8ba8c2cab06c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.862859] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.865063] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquired lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.865063] env[62914]: DEBUG nova.network.neutron [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 658.894105] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f146d56b-9190-46e3-bde9-08a9b48c1113 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.911402] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c0af80-e069-4e16-837f-158e6ddb033c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.953474] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19d8f5d-c843-4c78-b5f9-adf3d73edda7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.962938] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4c9bc1-60a3-44ff-813b-bc5b504d76d2 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.982447] env[62914]: DEBUG nova.compute.provider_tree [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.070868] env[62914]: DEBUG nova.compute.manager [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Received event network-changed-1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 659.070868] env[62914]: DEBUG nova.compute.manager [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Refreshing instance network info cache due to event network-changed-1c22c510-e137-4ee3-8038-3b784a81e04f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 659.070868] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.070868] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.070868] env[62914]: DEBUG nova.network.neutron [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Refreshing network info cache for port 1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 659.112279] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 659.149314] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.149314] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.149451] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.149570] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.149712] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.149955] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.150219] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.150380] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 659.150545] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.151370] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.151370] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.151809] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dda126-3cb1-4274-8c3b-c93011847b11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.162834] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3678c505-7ceb-41d0-9535-55903f8a27d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.217503] env[62914]: DEBUG nova.network.neutron [-] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.218870] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831431, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.311943] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bfaf58-44ff-0ce9-4190-8ba8c2cab06c, 'name': SearchDatastore_Task, 'duration_secs': 0.031115} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.312270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.313415] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 659.313838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.314090] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.314355] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dad40c3-7524-4c76-bb1e-2548feba57a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.316695] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3324eab-924d-4020-920b-2a1758a9041a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.331185] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 659.331185] env[62914]: value = "task-4831435" [ 659.331185] env[62914]: _type = "Task" [ 659.331185] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.337130] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.339763] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 659.340989] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e62a3ec5-681d-46e9-968f-34cc458a25f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.354669] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.359792] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 659.359792] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c14fa2-ebbe-eef5-50cc-36b0db058b63" [ 659.359792] env[62914]: _type = "Task" [ 659.359792] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.378041] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c14fa2-ebbe-eef5-50cc-36b0db058b63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.479923] env[62914]: DEBUG nova.network.neutron [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.485649] env[62914]: DEBUG nova.scheduler.client.report [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.718180] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831431, 'name': ReconfigVM_Task, 'duration_secs': 1.954726} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.718180] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 1ddb6508-d8fb-4ead-bcb0-370c19bb287d/1ddb6508-d8fb-4ead-bcb0-370c19bb287d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 659.718376] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a79a3a80-921a-43b8-9bb2-01c19ea657d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.722727] env[62914]: INFO nova.compute.manager [-] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Took 3.95 seconds to deallocate network for instance. [ 659.731530] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 659.731530] env[62914]: value = "task-4831436" [ 659.731530] env[62914]: _type = "Task" [ 659.731530] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.743584] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831436, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.844213] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831435, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.871866] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c14fa2-ebbe-eef5-50cc-36b0db058b63, 'name': SearchDatastore_Task, 'duration_secs': 0.022902} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.872847] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7edbdbbd-7b58-4e07-b9ba-f9abb5b267ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.879819] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 659.879819] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52111214-73ef-560e-f70c-efb62df991d8" [ 659.879819] env[62914]: _type = "Task" [ 659.879819] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.889530] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52111214-73ef-560e-f70c-efb62df991d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.995181] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.906s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.995909] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 660.001330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.910s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.003893] env[62914]: INFO nova.compute.claims [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.234386] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.248585] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831436, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.282368] env[62914]: DEBUG nova.network.neutron [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Updating instance_info_cache with network_info: [{"id": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "address": "fa:16:3e:5f:37:96", "network": {"id": "e34551c0-a033-4c11-8d79-17366dafd005", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1105594559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c4d14e64cb240d9816b0677dc020110", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf21f1d-ed", "ovs_interfaceid": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.347209] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831435, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.403743] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52111214-73ef-560e-f70c-efb62df991d8, 'name': SearchDatastore_Task, 'duration_secs': 0.027868} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.403743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.403743] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/6bdcd778-0942-41e7-a6fb-7c3413d34ef7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 660.403743] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27fbf98d-da96-4c9a-9836-06065d4a5c5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.414907] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 660.414907] env[62914]: value = "task-4831437" [ 660.414907] env[62914]: _type = "Task" [ 660.414907] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.428109] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.510853] env[62914]: DEBUG nova.compute.utils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.521273] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 660.521273] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 660.695652] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Successfully updated port: 3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 660.744629] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831436, 'name': Rename_Task, 'duration_secs': 0.621117} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.746309] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 660.746763] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd07e507-25ca-458f-805c-895917f24d29 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.759220] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 660.759220] env[62914]: value = "task-4831438" [ 660.759220] env[62914]: _type = "Task" [ 660.759220] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.775356] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831438, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.790037] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Releasing lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.790037] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Instance network_info: |[{"id": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "address": "fa:16:3e:5f:37:96", "network": {"id": "e34551c0-a033-4c11-8d79-17366dafd005", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1105594559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c4d14e64cb240d9816b0677dc020110", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf21f1d-ed", "ovs_interfaceid": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 660.790037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:37:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.801721] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Creating folder: Project (2c4d14e64cb240d9816b0677dc020110). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.801721] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40214018-61c8-4b2a-974f-6864c03765dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.813640] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Created folder: Project (2c4d14e64cb240d9816b0677dc020110) in parent group-v941773. [ 660.813984] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Creating folder: Instances. Parent ref: group-v941852. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.814154] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07747a19-76c5-438f-a36f-799d23d8e71b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.826805] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Created folder: Instances in parent group-v941852. [ 660.827086] env[62914]: DEBUG oslo.service.loopingcall [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.827495] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 660.827570] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30947e6e-9235-4e31-ab37-c5fab639d701 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.855411] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.855411] env[62914]: value = "task-4831441" [ 660.855411] env[62914]: _type = "Task" [ 660.855411] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.860747] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831435, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.029986} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.864899] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 660.864899] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 660.865227] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed4422af-c867-4a69-ae3b-6b3ab09815d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.877938] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831441, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.879351] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 660.879351] env[62914]: value = "task-4831442" [ 660.879351] env[62914]: _type = "Task" [ 660.879351] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.889199] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831442, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.925466] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831437, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.985454] env[62914]: DEBUG nova.policy [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b603a1ee50c34178992cdb26f88c3863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dd71bf518024821931bb9add9996d4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 661.018386] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 661.079126] env[62914]: DEBUG nova.network.neutron [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updated VIF entry in instance network info cache for port 1c22c510-e137-4ee3-8038-3b784a81e04f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 661.079718] env[62914]: DEBUG nova.network.neutron [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.198822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.199107] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.199295] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.279884] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831438, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.382885] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831441, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.396291] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184501} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.396782] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.397729] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251be58f-4838-44ad-b6da-77a913d4dcf5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.428194] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.435642] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64abe1f8-b39c-44c9-ba0c-e6054fa62bdc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.461031] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831437, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.009176} completed 
successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.461969] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/6bdcd778-0942-41e7-a6fb-7c3413d34ef7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 661.462664] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.463397] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 661.463397] env[62914]: value = "task-4831443" [ 661.463397] env[62914]: _type = "Task" [ 661.463397] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.463397] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6310d364-2c4a-4915-a11c-d9d3055f3083 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.481324] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831443, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.483458] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 661.483458] env[62914]: value = "task-4831444" [ 661.483458] env[62914]: _type = "Task" [ 661.483458] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.496623] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831444, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.584716] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.585103] env[62914]: DEBUG nova.compute.manager [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Received event network-changed-e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 661.585277] env[62914]: DEBUG nova.compute.manager [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Refreshing instance network info cache due to event network-changed-e42f6371-f854-4e39-ae20-c78d59217dbb. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 661.585431] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] Acquiring lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.585579] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] Acquired lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.585742] env[62914]: DEBUG nova.network.neutron [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Refreshing network info cache for port e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 661.668579] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0a694c-bbbd-476b-a391-1089d938da96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.677596] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39131cb7-8a0d-421d-b293-c4740d44c390 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.713478] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f830f3-73de-491e-8c16-abc15227648f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.722134] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8402a84e-d55c-4dc9-a757-bce417ba33ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.737203] env[62914]: DEBUG nova.compute.provider_tree [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed in ProviderTree for provider: 
f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.770776] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831438, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.773457] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Successfully created port: 770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.779058] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.880307] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831441, 'name': CreateVM_Task, 'duration_secs': 0.691061} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.880495] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 661.881303] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.881474] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.881795] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 661.882077] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b615fd10-2220-4083-a0f3-792c23e6dc55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.887766] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 661.887766] env[62914]: value = 
"session[52d52040-ced2-7a98-19c6-f97f142d02ee]5201ae8c-6210-c223-aa41-e904d154ec29" [ 661.887766] env[62914]: _type = "Task" [ 661.887766] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.900968] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5201ae8c-6210-c223-aa41-e904d154ec29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.977044] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831443, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.999504] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182548} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.999801] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.000630] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372c3ab9-7bcf-4304-82b7-26e98bd1e8e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.024456] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/6bdcd778-0942-41e7-a6fb-7c3413d34ef7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.024827] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df453415-6fdb-4d50-9822-19d48234d43d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.041130] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 662.051069] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 662.051069] env[62914]: value = "task-4831445" [ 662.051069] env[62914]: _type = "Task" [ 662.051069] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.060583] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831445, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.071942] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.072236] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.072491] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.072604] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.073408] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.073408] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.073408] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.073408] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.073586] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.073634] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.073785] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.074817] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386e8f9c-5b86-4479-ae77-8752df040f3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.083411] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1492f1-5b99-437d-85c3-71231752ed39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.208418] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Updating instance_info_cache with network_info: [{"id": "3a09d962-f1f2-4390-8d9e-9856c75ba69f", "address": "fa:16:3e:82:7e:6c", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a09d962-f1", "ovs_interfaceid": "3a09d962-f1f2-4390-8d9e-9856c75ba69f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.244495] env[62914]: DEBUG nova.scheduler.client.report [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 662.273454] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831438, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.421613] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5201ae8c-6210-c223-aa41-e904d154ec29, 'name': SearchDatastore_Task, 'duration_secs': 0.028785} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.422029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.422288] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.422537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.422682] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.422869] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.423229] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc45bf9d-51a2-40eb-9275-a414fac37ed9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.436311] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.436695] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 662.438559] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d7c213a-9677-43ab-a892-87ad07f6932f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.449018] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 662.449018] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5200ad98-0234-0dd0-0b12-ef9e0a8034e9" [ 662.449018] env[62914]: _type = "Task" [ 662.449018] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.460306] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5200ad98-0234-0dd0-0b12-ef9e0a8034e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.480313] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831443, 'name': ReconfigVM_Task, 'duration_secs': 0.57273} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.480995] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f/70a6d3e7-6928-47a7-9f7f-bd5dad64912f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.481448] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a48f0184-9ebb-4597-9e73-686a85f1ce90 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.491754] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 662.491754] env[62914]: value = "task-4831446" [ 662.491754] env[62914]: _type = "Task" [ 662.491754] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.507306] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831446, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.520435] env[62914]: DEBUG nova.network.neutron [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Updated VIF entry in instance network info cache for port e42f6371-f854-4e39-ae20-c78d59217dbb. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 662.520435] env[62914]: DEBUG nova.network.neutron [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Updating instance_info_cache with network_info: [{"id": "e42f6371-f854-4e39-ae20-c78d59217dbb", "address": "fa:16:3e:50:ed:dd", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape42f6371-f8", "ovs_interfaceid": "e42f6371-f854-4e39-ae20-c78d59217dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.565297] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831445, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.608762] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "cead3557-080d-4956-a957-cac449bb69f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.609088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "cead3557-080d-4956-a957-cac449bb69f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.698040] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-vif-deleted-668a09a1-427d-4507-b7d0-45cab066cac8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 662.698305] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Received event network-vif-plugged-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 662.698510] env[62914]: DEBUG oslo_concurrency.lockutils [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] Acquiring lock "82aab17d-a6d0-48cf-a59a-fbef7d402894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.698717] env[62914]: DEBUG oslo_concurrency.lockutils [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.698924] env[62914]: DEBUG oslo_concurrency.lockutils [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.699204] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] No waiting events found dispatching network-vif-plugged-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 662.699388] env[62914]: WARNING nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Received unexpected 
event network-vif-plugged-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc for instance with vm_state building and task_state spawning. [ 662.699556] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-vif-deleted-a2583c61-5b1a-4a33-8206-4f81fca1b131 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 662.699727] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Received event network-changed-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 662.699898] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Refreshing instance network info cache due to event network-changed-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 662.700114] env[62914]: DEBUG oslo_concurrency.lockutils [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] Acquiring lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.700267] env[62914]: DEBUG oslo_concurrency.lockutils [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] Acquired lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.700434] env[62914]: DEBUG nova.network.neutron [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Refreshing network info cache for port eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 662.715614] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.716892] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Instance network_info: |[{"id": "3a09d962-f1f2-4390-8d9e-9856c75ba69f", "address": "fa:16:3e:82:7e:6c", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a09d962-f1", "ovs_interfaceid": "3a09d962-f1f2-4390-8d9e-9856c75ba69f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 662.717819] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:7e:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a09d962-f1f2-4390-8d9e-9856c75ba69f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.726428] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Creating folder: Project (7dd71bf518024821931bb9add9996d4e). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.728050] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4353d077-6860-40bd-8c91-f9775d2cd90b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.742424] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Created folder: Project (7dd71bf518024821931bb9add9996d4e) in parent group-v941773. [ 662.742849] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Creating folder: Instances. Parent ref: group-v941855. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.742849] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32d73b2f-fc72-4a84-b534-f8650e6bd8be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.753837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.754166] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 662.758171] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.628s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.759610] env[62914]: INFO nova.compute.claims [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.768342] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Created folder: Instances in parent group-v941855. [ 662.768614] env[62914]: DEBUG oslo.service.loopingcall [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.772665] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 662.772963] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbbf9cbf-1aae-4478-8e0f-c5387a84400f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.795014] env[62914]: DEBUG oslo_vmware.api [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831438, 'name': PowerOnVM_Task, 'duration_secs': 1.797239} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.800219] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 662.800379] env[62914]: INFO nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Took 14.07 seconds to spawn the instance on the hypervisor. [ 662.800470] env[62914]: DEBUG nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 662.801485] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.801485] env[62914]: value = "task-4831449" [ 662.801485] env[62914]: _type = "Task" [ 662.801485] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.801886] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcef678-5416-4130-a2e5-0926a56dd591 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.819793] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831449, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.919694] env[62914]: DEBUG nova.compute.manager [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Received event network-vif-plugged-3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 662.919962] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] Acquiring lock "d8d08c36-bec2-4117-9352-8e148d25dc9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.920214] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.920416] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.920571] env[62914]: DEBUG nova.compute.manager [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] No waiting events found dispatching network-vif-plugged-3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 662.921594] env[62914]: WARNING nova.compute.manager [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Received unexpected event network-vif-plugged-3a09d962-f1f2-4390-8d9e-9856c75ba69f for instance with vm_state building and task_state spawning. [ 662.921594] env[62914]: DEBUG nova.compute.manager [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Received event network-changed-3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 662.921594] env[62914]: DEBUG nova.compute.manager [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Refreshing instance network info cache due to event network-changed-3a09d962-f1f2-4390-8d9e-9856c75ba69f. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 662.921594] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] Acquiring lock "refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.921594] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] Acquired lock "refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.921594] env[62914]: DEBUG nova.network.neutron [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Refreshing network info cache for port 3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 662.944297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "bf2e9634-66ee-4b6a-a148-bc77420d793f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.944854] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.959127] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5200ad98-0234-0dd0-0b12-ef9e0a8034e9, 'name': SearchDatastore_Task, 'duration_secs': 0.01775} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.959862] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e036066e-6839-4c84-82c3-cdfd5d92d0d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.966431] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 662.966431] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52566976-5f9b-040b-9fe0-40443faa310f" [ 662.966431] env[62914]: _type = "Task" [ 662.966431] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.975480] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52566976-5f9b-040b-9fe0-40443faa310f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.005608] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831446, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.026114] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] Releasing lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.026316] env[62914]: DEBUG nova.compute.manager [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Received event network-vif-deleted-1e7a9bbc-61fa-4ecf-8142-2568ff07c25c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 663.026520] env[62914]: DEBUG nova.compute.manager [req-f0ac4295-bf07-4422-a796-bdd3cc64a151 req-d298eca5-2c16-4a23-8b93-57f379824339 service nova] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Received event network-vif-deleted-5dee04a2-563a-4fb4-8651-bec18ae531ea {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 663.063542] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831445, 'name': ReconfigVM_Task, 'duration_secs': 0.546761} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.063542] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/6bdcd778-0942-41e7-a6fb-7c3413d34ef7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.064077] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-787c9730-235a-4edd-a2aa-f15d3644693b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.072445] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 663.072445] env[62914]: value = "task-4831451" [ 663.072445] env[62914]: _type = "Task" [ 663.072445] env[62914]: } to complete. 
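Shared state in this trace is consistently serialized through named locks such as "refresh_cache-<instance uuid>" and "<uuid>-events", with the wait and hold times logged on acquire/release. oslo.concurrency's lockutils provides the real mechanism; the following is only a minimal stdlib sketch of per-name locking:

import threading
from contextlib import contextmanager

_registry_guard = threading.Lock()
_named_locks = {}


@contextmanager
def named_lock(name):
    """Serialize critical sections that share the same lock name,
    e.g. 'refresh_cache-<instance uuid>' in the log above."""
    with _registry_guard:
        lock = _named_locks.setdefault(name, threading.Lock())
    lock.acquire()
    try:
        yield
    finally:
        lock.release()


# Usage: only one thread at a time refreshes a given instance's network cache.
with named_lock("refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e"):
    pass  # refresh the cache here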
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.083971] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831451, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.269260] env[62914]: DEBUG nova.compute.utils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.272911] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 663.272911] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 663.317433] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831449, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.335218] env[62914]: INFO nova.compute.manager [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Took 40.35 seconds to build instance. [ 663.357413] env[62914]: DEBUG nova.policy [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b603a1ee50c34178992cdb26f88c3863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dd71bf518024821931bb9add9996d4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 663.478335] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52566976-5f9b-040b-9fe0-40443faa310f, 'name': SearchDatastore_Task, 'duration_secs': 0.022935} completed successfully. 
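The nova.policy line above shows "network:attach_external_network" being denied for a request whose roles are ['member', 'reader']. A toy decision function consistent with that outcome; the admin-only rule here is an assumption for illustration, not the deployment's actual policy file or oslo.policy itself:

def can_attach_external_network(credentials):
    """Toy check mirroring the failed policy decision in the log: the
    request carried roles ['member', 'reader'] and was denied. The
    admin-only rule is assumed for illustration."""
    return "admin" in credentials.get("roles", [])


creds = {"user_id": "b603a1ee50c34178992cdb26f88c3863",
         "roles": ["member", "reader"]}
assert can_attach_external_network(creds) is False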
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.478581] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.478902] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 82aab17d-a6d0-48cf-a59a-fbef7d402894/82aab17d-a6d0-48cf-a59a-fbef7d402894.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 663.479150] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b81ed550-ae71-4d1c-80fa-1e7a20852c45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.487349] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 663.487349] env[62914]: value = "task-4831452" [ 663.487349] env[62914]: _type = "Task" [ 663.487349] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.501666] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.513414] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831446, 'name': Rename_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.533385] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "69a9cd15-7d6f-464d-b451-e193179088f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.533753] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "69a9cd15-7d6f-464d-b451-e193179088f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.533971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "69a9cd15-7d6f-464d-b451-e193179088f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.534189] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "69a9cd15-7d6f-464d-b451-e193179088f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.534512] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "69a9cd15-7d6f-464d-b451-e193179088f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.537226] env[62914]: INFO nova.compute.manager [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Terminating instance [ 663.539844] env[62914]: DEBUG nova.compute.manager [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Start destroying the instance on the hypervisor. 
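The terminate path that starts above ("Terminating instance", "Start destroying the instance on the hypervisor") proceeds through power-off, unregister, datastore cleanup and network deallocation in the lines that follow. A compressed outline of that ordering, with a hypothetical `vcenter` helper object standing in for the driver:

def destroy_instance(vcenter, instance, datastore):
    """Illustrative teardown order matching the log; `vcenter` and its
    methods are hypothetical stand-ins, not the vmwareapi driver's API."""
    vcenter.power_off(instance.vm_ref)        # PowerOffVM_Task
    vcenter.unregister(instance.vm_ref)       # VirtualMachine.UnregisterVM
    vcenter.delete_datastore_dir(             # FileManager.DeleteDatastoreFile_Task
        f"[{datastore}] {instance.uuid}")
    vcenter.deallocate_network(instance.uuid) # deallocate_for_instance()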
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 663.540078] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 663.541220] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc93561-20bd-4fce-87ff-866238ffe66a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.553617] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 663.553895] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c92d0148-7d8f-453f-a2bc-9ea050c6b501 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.562878] env[62914]: DEBUG oslo_vmware.api [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 663.562878] env[62914]: value = "task-4831453" [ 663.562878] env[62914]: _type = "Task" [ 663.562878] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.574283] env[62914]: DEBUG oslo_vmware.api [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.584569] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831451, 'name': Rename_Task, 'duration_secs': 0.225395} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.585903] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 663.585903] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa14134b-4fb9-4d84-aaff-d523d42fdba0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.594747] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 663.594747] env[62914]: value = "task-4831454" [ 663.594747] env[62914]: _type = "Task" [ 663.594747] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.604296] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.712804] env[62914]: DEBUG nova.network.neutron [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Updated VIF entry in instance network info cache for port eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 663.713432] env[62914]: DEBUG nova.network.neutron [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Updating instance_info_cache with network_info: [{"id": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "address": "fa:16:3e:5f:37:96", "network": {"id": "e34551c0-a033-4c11-8d79-17366dafd005", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1105594559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c4d14e64cb240d9816b0677dc020110", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf21f1d-ed", "ovs_interfaceid": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.783951] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 663.823022] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831449, 'name': CreateVM_Task} progress is 25%. 
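The refreshed cache entry above is a list of VIF dictionaries whose fixed addresses sit under network -> subnets -> ips. A small helper for pulling those addresses out of a structure shaped like the logged one:

def fixed_ips(network_info):
    """Collect fixed IPs from a network_info list shaped like the cache
    entry in the log: [{'id': ..., 'address': <MAC>, 'network':
    {'subnets': [{'ips': [{'address': ...}, ...]}, ...]}}, ...]."""
    ips = []
    for vif in network_info:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                ips.append(ip["address"])
    return ips


example_vif = {
    "id": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc",
    "address": "fa:16:3e:5f:37:96",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.13"}]}]},
}
assert fixed_ips([example_vif]) == ["192.168.128.13"]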
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.837294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fb4da67-3c10-40d0-89a4-a8dd6318b78c tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.410s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.845371] env[62914]: DEBUG nova.network.neutron [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Updated VIF entry in instance network info cache for port 3a09d962-f1f2-4390-8d9e-9856c75ba69f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 663.845371] env[62914]: DEBUG nova.network.neutron [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Updating instance_info_cache with network_info: [{"id": "3a09d962-f1f2-4390-8d9e-9856c75ba69f", "address": "fa:16:3e:82:7e:6c", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a09d962-f1", "ovs_interfaceid": "3a09d962-f1f2-4390-8d9e-9856c75ba69f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.003802] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831452, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.025321] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831446, 'name': Rename_Task, 'duration_secs': 1.288609} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.025321] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 664.025556] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac63fb6a-1835-47ac-a274-10cc66968ab8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.036836] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 664.036836] env[62914]: value = "task-4831455" [ 664.036836] env[62914]: _type = "Task" [ 664.036836] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.047016] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.079111] env[62914]: DEBUG oslo_vmware.api [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831453, 'name': PowerOffVM_Task, 'duration_secs': 0.333578} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.083064] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 664.083388] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 664.084231] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee255958-7132-46f6-8f6b-d5e0e5fd8bf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.110770] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.182303] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 664.183320] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 664.183320] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Deleting the datastore file [datastore1] 69a9cd15-7d6f-464d-b451-e193179088f7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 664.183320] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbb59700-3bf7-41be-bc2b-c7fd1bfd6b5d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.190832] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Successfully created port: d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.200589] env[62914]: DEBUG oslo_vmware.api [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for the task: (returnval){ [ 664.200589] env[62914]: value = "task-4831457" [ 664.200589] env[62914]: _type = "Task" [ 664.200589] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.211785] env[62914]: DEBUG oslo_vmware.api [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831457, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.217888] env[62914]: DEBUG oslo_concurrency.lockutils [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] Releasing lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.218332] env[62914]: DEBUG nova.compute.manager [req-73052db6-473a-40d9-9adc-40342e82f837 req-82a7f726-7433-4731-9748-7b07c6e47363 service nova] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Received event network-vif-deleted-a1cdb314-5a29-443e-8562-bced871a8df1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 664.320918] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831449, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.342773] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 664.347774] env[62914]: DEBUG oslo_concurrency.lockutils [req-f0df55cf-9171-4435-8e40-d3648e12f117 req-19a6a3bd-9a81-42a8-9aaa-cf00b30af1d6 service nova] Releasing lock "refresh_cache-d8d08c36-bec2-4117-9352-8e148d25dc9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.436595] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775892a2-6e93-4d1d-b667-64185e476458 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.445587] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea5aa84-35c7-400d-956a-f85fab4cdcc7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.451618] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-6fd5f3b8-1175-4bd5-b0b4-12517ba65271-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.451888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-6fd5f3b8-1175-4bd5-b0b4-12517ba65271-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.452455] env[62914]: DEBUG nova.objects.instance [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'flavor' on Instance uuid 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
664.485499] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5c34ce-220c-4331-8a3f-696ef38fb003 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.496392] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3104743-354a-4036-8a34-2bfb9ef8db41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.504278] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.922837} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.504924] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 82aab17d-a6d0-48cf-a59a-fbef7d402894/82aab17d-a6d0-48cf-a59a-fbef7d402894.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 664.505189] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.505413] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f920b17-b913-4796-8563-0961f0c39227 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.515862] env[62914]: DEBUG nova.compute.provider_tree [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.523320] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 664.523320] env[62914]: value = "task-4831458" [ 664.523320] env[62914]: _type = "Task" [ 664.523320] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.533144] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831458, 'name': ExtendVirtualDisk_Task} progress is 0%. 
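The spawn path above copies the cached base image to a per-instance VMDK and then extends the root disk to 1048576 (KiB), which is consistent with root_gb=1 in the flavor logged further down (1 GiB = 1024 * 1024 KiB). Illustrative helpers for the paths and the size conversion; the layout mirrors what the log shows rather than any driver API:

def cached_image_path(datastore, image_id):
    """Cached base image path, as seen in the CopyVirtualDisk source."""
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"


def instance_disk_path(datastore, instance_uuid):
    """Per-instance root disk path the cached image is copied to."""
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


def root_disk_kib(root_gb):
    """'Extending root virtual disk to 1048576' is the flavor's root_gb
    expressed in KiB (1 GiB = 1024 * 1024 KiB)."""
    return root_gb * 1024 * 1024


assert root_disk_kib(1) == 1048576
assert instance_disk_path("datastore1", "82aab17d-a6d0-48cf-a59a-fbef7d402894").endswith(
    "82aab17d-a6d0-48cf-a59a-fbef7d402894.vmdk")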
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.549662] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831455, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.608461] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.636587] env[62914]: DEBUG nova.objects.instance [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'pci_requests' on Instance uuid 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 664.715987] env[62914]: DEBUG oslo_vmware.api [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Task: {'id': task-4831457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.505703} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.716298] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 664.716506] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 664.716772] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 664.717062] env[62914]: INFO nova.compute.manager [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 664.717174] env[62914]: DEBUG oslo.service.loopingcall [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
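The looping call above waits for _deallocate_network_with_retries to return, i.e. network teardown is retried rather than attempted once. A generic retry-with-backoff wrapper in the same spirit; the attempt count and delays are illustrative, not Nova's actual retry policy:

import time


def call_with_retries(func, attempts=3, initial_delay=1.0, backoff=2.0):
    """Call `func` until it succeeds or `attempts` is exhausted, sleeping
    with exponential backoff between failures."""
    delay = initial_delay
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)
            delay *= backoff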
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.717950] env[62914]: DEBUG nova.compute.manager [-] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 664.717950] env[62914]: DEBUG nova.network.neutron [-] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 664.753204] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Successfully updated port: 770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 664.803587] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 664.817018] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831449, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.838387] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 664.838701] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 664.838975] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.839283] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 664.839477] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.839641] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 664.839859] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 664.840036] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 664.840244] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 664.840426] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 664.840758] env[62914]: DEBUG nova.virt.hardware [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.841900] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20ae91b-115f-4fcd-9481-356f3eb00077 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.854296] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac219377-eb16-4017-9f16-5d4ffed688ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.878011] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.020631] env[62914]: DEBUG 
nova.scheduler.client.report [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 665.045057] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831458, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120081} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.054611] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.057553] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf56b55-a26c-4904-aaed-8b0966f0f522 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.072447] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831455, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.096938] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 82aab17d-a6d0-48cf-a59a-fbef7d402894/82aab17d-a6d0-48cf-a59a-fbef7d402894.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.098885] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7319d70a-78cb-4d35-b1e6-83083bd0780e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.128425] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task} progress is 1%. 
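The inventory reported above for provider f2f7a014-852b-4b37-9610-c5761f4b0175 uses the usual placement shape of total, reserved and allocation_ratio per resource class, so the schedulable capacity works out to (total - reserved) * allocation_ratio. A quick check with the logged numbers:

def effective_capacity(inventory):
    """Placement-style usable capacity per resource class:
    (total - reserved) * allocation_ratio."""
    return {
        rc: int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        for rc, inv in inventory.items()
    }


inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
}
print(effective_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 200}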
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.130336] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 665.130336] env[62914]: value = "task-4831459" [ 665.130336] env[62914]: _type = "Task" [ 665.130336] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.141186] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831459, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.142238] env[62914]: DEBUG nova.objects.base [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Object Instance<6fd5f3b8-1175-4bd5-b0b4-12517ba65271> lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 665.142238] env[62914]: DEBUG nova.network.neutron [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 665.197632] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 665.198350] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941851', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'name': 'volume-1ee7830b-c356-46cb-bbb5-755b85e54338', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494', 'attached_at': '', 'detached_at': '', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'serial': '1ee7830b-c356-46cb-bbb5-755b85e54338'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 665.199036] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a403f60d-ade9-4dbf-9d77-571b315037da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.225431] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224bdafb-14f2-4f00-bb02-6f33dfecc1e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.259667] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] volume-1ee7830b-c356-46cb-bbb5-755b85e54338/volume-1ee7830b-c356-46cb-bbb5-755b85e54338.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.262073] env[62914]: DEBUG nova.policy [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 665.265449] env[62914]: DEBUG nova.network.neutron [-] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.267226] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "refresh_cache-1fa01184-1ed2-43de-bcbf-bd8658acc9f9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.267325] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db 
tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "refresh_cache-1fa01184-1ed2-43de-bcbf-bd8658acc9f9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.267516] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 665.268774] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658bfad4-5a55-4df6-b92d-194fff892825 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.287426] env[62914]: INFO nova.compute.manager [-] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Took 0.57 seconds to deallocate network for instance. [ 665.296636] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Waiting for the task: (returnval){ [ 665.296636] env[62914]: value = "task-4831460" [ 665.296636] env[62914]: _type = "Task" [ 665.296636] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.311805] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831460, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.321100] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831449, 'name': CreateVM_Task, 'duration_secs': 2.32101} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.321562] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 665.322088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.322506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.322603] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 665.322883] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-755be503-cb76-473e-bc69-69e13948a7da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.329263] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 665.329263] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b2ff4a-629c-cd95-964e-7228211acc30" [ 665.329263] env[62914]: _type = "Task" [ 665.329263] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.342914] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b2ff4a-629c-cd95-964e-7228211acc30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.344366] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.533592] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.776s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.535579] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 665.541018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.402s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.543978] env[62914]: INFO nova.compute.claims [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.564585] env[62914]: DEBUG oslo_vmware.api [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831455, 'name': PowerOnVM_Task, 'duration_secs': 1.18817} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.564939] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 665.565176] env[62914]: DEBUG nova.compute.manager [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 665.566050] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be871f26-bae4-4787-a1e5-3a63ebbea3c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.620645] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.642767] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.759669] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Updating instance_info_cache with network_info: [{"id": "770e30b2-2f05-4531-b9d0-6482b3d18b22", "address": "fa:16:3e:7a:88:91", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap770e30b2-2f", "ovs_interfaceid": "770e30b2-2f05-4531-b9d0-6482b3d18b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.797418] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.811599] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.843970] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b2ff4a-629c-cd95-964e-7228211acc30, 'name': SearchDatastore_Task, 'duration_secs': 0.019913} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.844638] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.844897] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.845194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.845331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.845526] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.845824] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e347b7af-e3ae-43c9-8f68-37295e3f4573 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.856576] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.856910] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 665.857759] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0aba2e40-177c-4c34-98ab-e5a7eb26b0df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.865772] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 665.865772] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291e4c0-a3d4-a7cf-33fd-ae073a7a3ad4" [ 665.865772] env[62914]: _type = "Task" [ 665.865772] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.875099] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291e4c0-a3d4-a7cf-33fd-ae073a7a3ad4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.054549] env[62914]: DEBUG nova.compute.utils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 666.062246] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 666.064458] env[62914]: DEBUG nova.network.neutron [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 666.097438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.111807] env[62914]: DEBUG nova.policy [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ae04f998180421e907cba2912b03f06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d583bb937c140d39597825e78c54646', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 666.119678] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.142791] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831459, 'name': ReconfigVM_Task, 'duration_secs': 0.740279} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.143535] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 82aab17d-a6d0-48cf-a59a-fbef7d402894/82aab17d-a6d0-48cf-a59a-fbef7d402894.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.144151] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9aa1e381-a3bc-4dd7-ae0d-78694cc23c10 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.154568] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 666.154568] env[62914]: value = "task-4831461" [ 666.154568] env[62914]: _type = "Task" [ 666.154568] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.169296] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831461, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.266035] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "refresh_cache-1fa01184-1ed2-43de-bcbf-bd8658acc9f9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.266035] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Instance network_info: |[{"id": "770e30b2-2f05-4531-b9d0-6482b3d18b22", "address": "fa:16:3e:7a:88:91", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap770e30b2-2f", "ovs_interfaceid": "770e30b2-2f05-4531-b9d0-6482b3d18b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 666.266035] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:88:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '770e30b2-2f05-4531-b9d0-6482b3d18b22', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.276313] env[62914]: DEBUG oslo.service.loopingcall [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.276590] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 666.276860] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46fdf0be-caa2-40f1-b4e6-cd6a0db48824 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.304517] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 666.304517] env[62914]: value = "task-4831462" [ 666.304517] env[62914]: _type = "Task" [ 666.304517] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.316485] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831460, 'name': ReconfigVM_Task, 'duration_secs': 0.666231} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.316485] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Reconfigured VM instance instance-00000009 to attach disk [datastore1] volume-1ee7830b-c356-46cb-bbb5-755b85e54338/volume-1ee7830b-c356-46cb-bbb5-755b85e54338.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.329695] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4d6fd9c-310f-46ff-9a45-4117f79ed978 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.341070] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831462, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.348514] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Waiting for the task: (returnval){ [ 666.348514] env[62914]: value = "task-4831463" [ 666.348514] env[62914]: _type = "Task" [ 666.348514] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.360061] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831463, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.365752] env[62914]: DEBUG nova.network.neutron [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Successfully created port: ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.380668] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291e4c0-a3d4-a7cf-33fd-ae073a7a3ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.017748} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.381590] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d503189-e208-48f3-9bc7-ec147313943a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.389294] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 666.389294] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52511d7d-261d-b424-3dd1-10af4cc68d4d" [ 666.389294] env[62914]: _type = "Task" [ 666.389294] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.400398] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52511d7d-261d-b424-3dd1-10af4cc68d4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.472162] env[62914]: DEBUG nova.network.neutron [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Successfully created port: ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.563516] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 666.618085] env[62914]: DEBUG oslo_vmware.api [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831454, 'name': PowerOnVM_Task, 'duration_secs': 2.725512} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.618085] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 666.618085] env[62914]: INFO nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Took 15.29 seconds to spawn the instance on the hypervisor. [ 666.618085] env[62914]: DEBUG nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 666.619919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5d612c-5f11-4f23-a434-a7f04d33f9c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.668178] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831461, 'name': Rename_Task, 'duration_secs': 0.286808} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.668423] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 666.668902] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cceb54d-a7b4-4764-93b2-542f67df11d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.687775] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 666.687775] env[62914]: value = "task-4831464" [ 666.687775] env[62914]: _type = "Task" [ 666.687775] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.702382] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831464, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.822905] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831462, 'name': CreateVM_Task, 'duration_secs': 0.407504} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.822905] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 666.823404] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.823747] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.824279] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 666.824830] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca9cba46-58e0-4f91-8b8c-eced08cd5547 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.836719] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 666.836719] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a894d5-83d2-f814-97b2-8470a61b485d" [ 666.836719] env[62914]: _type = "Task" [ 666.836719] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.847571] env[62914]: DEBUG nova.compute.manager [req-d6512856-b981-4fa2-9a1b-e8397e48b994 req-3003787e-3b93-4ec8-a148-b8b6b849e540 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Received event network-vif-plugged-770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 666.848055] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6512856-b981-4fa2-9a1b-e8397e48b994 req-3003787e-3b93-4ec8-a148-b8b6b849e540 service nova] Acquiring lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.848385] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6512856-b981-4fa2-9a1b-e8397e48b994 req-3003787e-3b93-4ec8-a148-b8b6b849e540 service nova] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.848650] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6512856-b981-4fa2-9a1b-e8397e48b994 req-3003787e-3b93-4ec8-a148-b8b6b849e540 service nova] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.848935] env[62914]: DEBUG nova.compute.manager [req-d6512856-b981-4fa2-9a1b-e8397e48b994 req-3003787e-3b93-4ec8-a148-b8b6b849e540 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] No waiting events found dispatching network-vif-plugged-770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 666.852022] env[62914]: WARNING nova.compute.manager [req-d6512856-b981-4fa2-9a1b-e8397e48b994 req-3003787e-3b93-4ec8-a148-b8b6b849e540 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Received unexpected event network-vif-plugged-770e30b2-2f05-4531-b9d0-6482b3d18b22 for instance with vm_state building and task_state spawning. [ 666.858370] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a894d5-83d2-f814-97b2-8470a61b485d, 'name': SearchDatastore_Task, 'duration_secs': 0.01173} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.859301] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.859713] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.860078] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.860388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.860721] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.864323] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95f10bf3-722e-42fb-aecf-b0c0c404810d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.867281] env[62914]: DEBUG oslo_vmware.api [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831463, 'name': ReconfigVM_Task, 'duration_secs': 0.216991} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.867752] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941851', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'name': 'volume-1ee7830b-c356-46cb-bbb5-755b85e54338', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494', 'attached_at': '', 'detached_at': '', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'serial': '1ee7830b-c356-46cb-bbb5-755b85e54338'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 666.880936] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.880936] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 666.881599] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f1d5a42-c4f7-40de-a6c3-ed9b99d55444 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.888458] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 666.888458] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a6c0ca-3fb5-7e59-7c7b-4f61e079bcaf" [ 666.888458] env[62914]: _type = "Task" [ 666.888458] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.904448] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a6c0ca-3fb5-7e59-7c7b-4f61e079bcaf, 'name': SearchDatastore_Task, 'duration_secs': 0.010396} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.909143] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52511d7d-261d-b424-3dd1-10af4cc68d4d, 'name': SearchDatastore_Task, 'duration_secs': 0.01677} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.909767] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Successfully updated port: d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.910909] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57f3cb01-7017-4148-9f33-ce5cddc06f40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.915240] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.915240] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] d8d08c36-bec2-4117-9352-8e148d25dc9e/d8d08c36-bec2-4117-9352-8e148d25dc9e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 666.917090] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2366f423-3dfa-4bbd-8c2d-e8aa6b9dec19 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.923926] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 666.923926] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5297d6d3-f186-0413-737e-5ee81dd4d5df" [ 666.923926] env[62914]: _type = "Task" [ 666.923926] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.933270] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 666.933270] env[62914]: value = "task-4831465" [ 666.933270] env[62914]: _type = "Task" [ 666.933270] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.945306] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5297d6d3-f186-0413-737e-5ee81dd4d5df, 'name': SearchDatastore_Task, 'duration_secs': 0.011111} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.945800] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.946080] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1fa01184-1ed2-43de-bcbf-bd8658acc9f9/1fa01184-1ed2-43de-bcbf-bd8658acc9f9.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 666.946401] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8a1c501-0690-4959-b418-6b2c01f099a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.951699] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.960483] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 666.960483] env[62914]: value = "task-4831466" [ 666.960483] env[62914]: _type = "Task" [ 666.960483] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.970029] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831466, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.049245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "a4fca617-da38-4913-b2c8-a2921da6db56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.049870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "a4fca617-da38-4913-b2c8-a2921da6db56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.049870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "a4fca617-da38-4913-b2c8-a2921da6db56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.049870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "a4fca617-da38-4913-b2c8-a2921da6db56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.050081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "a4fca617-da38-4913-b2c8-a2921da6db56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.052458] env[62914]: INFO nova.compute.manager [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Terminating instance [ 667.056149] env[62914]: DEBUG nova.compute.manager [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 667.056347] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 667.057266] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f773c16a-8c4b-4749-966b-aa94afc1939b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.068723] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 667.069175] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8bb3f241-0fdb-4e34-9e13-27abaf35fcf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.081382] env[62914]: DEBUG oslo_vmware.api [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 667.081382] env[62914]: value = "task-4831467" [ 667.081382] env[62914]: _type = "Task" [ 667.081382] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.097285] env[62914]: DEBUG oslo_vmware.api [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.145176] env[62914]: INFO nova.compute.manager [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Took 41.46 seconds to build instance. [ 667.179070] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7fefab-237e-4be0-807b-207946ed6f05 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.186587] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f21b314-4c76-49fb-94bd-3fc6c566068b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.202513] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831464, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.230393] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d137c10-eb2c-4bf1-b5a2-efc7204c0404 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.240076] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5db5053-b2e8-4d02-b0a0-363a949340a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.254821] env[62914]: DEBUG nova.compute.provider_tree [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.418220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "refresh_cache-bfdd7711-d081-42cf-9e4a-2df556d1b72e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.418938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "refresh_cache-bfdd7711-d081-42cf-9e4a-2df556d1b72e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.418938] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.452571] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.474879] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.576782] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 667.596150] env[62914]: DEBUG oslo_vmware.api [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831467, 'name': PowerOffVM_Task, 'duration_secs': 0.29119} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.596492] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 667.596861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 667.599368] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eb4cace-6695-49cd-b053-59cad41a85c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.607917] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 667.608214] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 667.608416] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.608702] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 667.608971] env[62914]: DEBUG nova.virt.hardware [None 
req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.609235] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 667.609708] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 667.609802] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 667.610032] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 667.610306] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 667.610593] env[62914]: DEBUG nova.virt.hardware [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 667.611580] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144af352-b935-4e4b-96f7-6240230d8eda {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.620398] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbef85e-e9ec-4069-b998-52cdcccb2cd5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.647562] env[62914]: DEBUG oslo_concurrency.lockutils [None req-edb656af-bad9-4d4e-9681-d8fe83744393 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.972s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.677868] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-156484d4-7265-461e-afa9-8cbb5d19568b 
tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 667.678285] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 667.678575] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Deleting the datastore file [datastore2] a4fca617-da38-4913-b2c8-a2921da6db56 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 667.679211] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7bc3734-c6ed-4e18-b026-ed806307c662 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.689170] env[62914]: DEBUG oslo_vmware.api [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for the task: (returnval){ [ 667.689170] env[62914]: value = "task-4831469" [ 667.689170] env[62914]: _type = "Task" [ 667.689170] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.706555] env[62914]: DEBUG oslo_vmware.api [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831469, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.711409] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831464, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.759482] env[62914]: DEBUG nova.scheduler.client.report [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.884986] env[62914]: DEBUG nova.compute.manager [req-f2f9bad2-547c-446a-956b-73e6e289c0a9 req-0cf02d44-c4f7-446d-8d2b-e7d75929f343 service nova] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Received event network-vif-deleted-e321d104-ea08-4f03-8274-e8ef45dc8952 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 667.944694] env[62914]: DEBUG nova.objects.instance [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lazy-loading 'flavor' on Instance uuid a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 667.961221] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.962432] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.977175] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831466, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.154310] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 668.205211] env[62914]: DEBUG oslo_vmware.api [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Task: {'id': task-4831469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.480887} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.205933] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.206305] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 668.206423] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 668.206631] env[62914]: INFO nova.compute.manager [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Took 1.15 seconds to destroy the instance on the hypervisor. [ 668.206867] env[62914]: DEBUG oslo.service.loopingcall [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.207081] env[62914]: DEBUG nova.compute.manager [-] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 668.207391] env[62914]: DEBUG nova.network.neutron [-] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 668.213177] env[62914]: DEBUG oslo_vmware.api [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831464, 'name': PowerOnVM_Task, 'duration_secs': 1.178305} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.213307] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 668.213537] env[62914]: INFO nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Took 14.11 seconds to spawn the instance on the hypervisor. 
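The PowerOffVM_Task, DeleteDatastoreFile_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vSphere task method, gets a Task reference back in the "(returnval)" block, and then blocks in wait_for_task while _poll_task logs periodic "progress is N%" updates until the task completes. A minimal sketch of that pattern using oslo.vmware's public VMwareAPISession API follows; the host, the credentials and the way vm_ref is obtained are placeholders for illustration, not values taken from this log.

    # Minimal sketch of the oslo.vmware task-wait pattern seen above.
    # Host, credentials and vm_ref are placeholders, not values from this log.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder username
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)           # how often task progress is polled

    def power_off(vm_ref):
        # invoke_api() issues the SOAP request (the "Invoking
        # VirtualMachine.PowerOffVM_Task" entries) and returns a Task reference.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task until vCenter reports success
        # (raising on error), which is what drives the periodic progress lines.
        return session.wait_for_task(task)
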
[ 668.213650] env[62914]: DEBUG nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 668.214541] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec7abd4-d1df-4837-819a-834329e50f62 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.267348] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.725s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.267756] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 668.271929] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.515s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.277228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.277415] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 668.277795] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.636s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.279507] env[62914]: INFO nova.compute.claims [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.284011] env[62914]: DEBUG nova.network.neutron [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Updating instance_info_cache with network_info: [{"id": 
"d1bc2a9f-2310-438f-a8f0-d1e1f60ef641", "address": "fa:16:3e:9c:fb:2a", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1bc2a9f-23", "ovs_interfaceid": "d1bc2a9f-2310-438f-a8f0-d1e1f60ef641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.290660] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25112bd-56ab-47e8-89c6-b1ceeb31c32a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.307172] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d9d83c-cde5-424f-a3f9-4708a2ef3467 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.337950] env[62914]: DEBUG nova.network.neutron [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Successfully updated port: ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 668.340570] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ebe349-279e-4d95-843a-8a72560dedeb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.349943] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5894157b-47d7-45e4-885c-877b1d8857ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.397258] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179021MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 668.397759] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.455363] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db 
tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831465, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.462250] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c56109-e6fe-421d-a943-3513e420f0eb tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.026s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.477969] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831466, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.156544} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.479605] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1fa01184-1ed2-43de-bcbf-bd8658acc9f9/1fa01184-1ed2-43de-bcbf-bd8658acc9f9.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 668.479605] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.479823] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29816f58-c5bf-4712-94a8-6cb54bcdbb78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.493170] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 668.493170] env[62914]: value = "task-4831470" [ 668.493170] env[62914]: _type = "Task" [ 668.493170] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.505227] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831470, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.696587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.736026] env[62914]: INFO nova.compute.manager [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Took 41.19 seconds to build instance. [ 668.785357] env[62914]: DEBUG nova.compute.utils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 668.786918] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 668.787201] env[62914]: DEBUG nova.network.neutron [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 668.801484] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "refresh_cache-bfdd7711-d081-42cf-9e4a-2df556d1b72e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.801484] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Instance network_info: |[{"id": "d1bc2a9f-2310-438f-a8f0-d1e1f60ef641", "address": "fa:16:3e:9c:fb:2a", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1bc2a9f-23", "ovs_interfaceid": 
"d1bc2a9f-2310-438f-a8f0-d1e1f60ef641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 668.802536] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:fb:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1bc2a9f-2310-438f-a8f0-d1e1f60ef641', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.810822] env[62914]: DEBUG oslo.service.loopingcall [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.811453] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 668.811720] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fc62f86-b0d0-4ded-805a-2b6afd2aaa7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.834158] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.834158] env[62914]: value = "task-4831471" [ 668.834158] env[62914]: _type = "Task" [ 668.834158] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.835607] env[62914]: DEBUG nova.policy [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90b2c487eb2c4e6da89557cc35b815d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '956b73f20dbc4c2187528d03ea975e02', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 668.847419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "refresh_cache-934a0ca3-d879-4b23-90fe-2c190c201a88" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.847550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquired lock "refresh_cache-934a0ca3-d879-4b23-90fe-2c190c201a88" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.847715] env[62914]: DEBUG nova.network.neutron [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 668.849911] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831471, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.962027] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831465, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.887391} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.962027] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] d8d08c36-bec2-4117-9352-8e148d25dc9e/d8d08c36-bec2-4117-9352-8e148d25dc9e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 668.962027] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.962436] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fdc81612-788b-4617-8b74-4ea1b2267657 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.972080] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 668.972080] env[62914]: value = "task-4831472" [ 668.972080] env[62914]: _type = "Task" [ 668.972080] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.987833] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.009630] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093548} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.013961] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.014483] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dec097-5eee-4daa-8f38-24b4a95742a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.045010] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 1fa01184-1ed2-43de-bcbf-bd8658acc9f9/1fa01184-1ed2-43de-bcbf-bd8658acc9f9.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.045833] env[62914]: DEBUG nova.network.neutron [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Successfully updated port: ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.047484] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-966430b7-1dc7-4c00-a5e2-387214b6fb7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.070669] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 669.070669] env[62914]: value = "task-4831473" [ 669.070669] env[62914]: _type = "Task" [ 669.070669] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.082196] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831473, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.196744] env[62914]: DEBUG nova.network.neutron [-] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.204085] env[62914]: DEBUG nova.network.neutron [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Successfully created port: 5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.238204] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38254885-7d28-4d8a-95c8-e7aacbc71c1c tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.706s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.293585] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 669.347442] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831471, 'name': CreateVM_Task, 'duration_secs': 0.376805} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.347605] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 669.348344] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.348505] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.350069] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 669.350069] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f561e532-268f-46a6-a25e-f3eb72a07cab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.362038] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 669.362038] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52338fd6-4398-c3a0-bf53-6cff99fcaa09" [ 669.362038] env[62914]: _type = "Task" [ 669.362038] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.374959] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52338fd6-4398-c3a0-bf53-6cff99fcaa09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.411283] env[62914]: DEBUG nova.network.neutron [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.488572] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084909} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.490551] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.498670] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cac6d93-f85d-4319-bc7c-bbbadd6c1de6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.529195] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] d8d08c36-bec2-4117-9352-8e148d25dc9e/d8d08c36-bec2-4117-9352-8e148d25dc9e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.538740] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6737ed67-5aba-42eb-852b-bf6620703a0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.558356] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 669.558356] env[62914]: value = "task-4831474" [ 669.558356] env[62914]: _type = "Task" [ 669.558356] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.570300] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.570300] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.570300] env[62914]: DEBUG nova.network.neutron [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 669.570300] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831474, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.586837] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831473, 'name': ReconfigVM_Task, 'duration_secs': 0.353242} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.589841] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 1fa01184-1ed2-43de-bcbf-bd8658acc9f9/1fa01184-1ed2-43de-bcbf-bd8658acc9f9.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.590813] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84f47224-4a19-4a7b-9668-7e7a194f242f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.599256] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 669.599256] env[62914]: value = "task-4831475" [ 669.599256] env[62914]: _type = "Task" [ 669.599256] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.611690] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831475, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.661386] env[62914]: DEBUG nova.network.neutron [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Updating instance_info_cache with network_info: [{"id": "ce7a44ed-a822-4d9c-ac68-4d421b3d5b23", "address": "fa:16:3e:f4:08:58", "network": {"id": "247af3ad-24a9-495b-bbb5-e1cb78f35739", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2062948421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d583bb937c140d39597825e78c54646", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce7a44ed-a8", "ovs_interfaceid": "ce7a44ed-a822-4d9c-ac68-4d421b3d5b23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.683029] env[62914]: INFO nova.compute.manager [-] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Took 1.47 seconds to deallocate network for instance. [ 669.740775] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 669.875345] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52338fd6-4398-c3a0-bf53-6cff99fcaa09, 'name': SearchDatastore_Task, 'duration_secs': 0.019974} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.875681] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.876062] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.876195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.876352] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.876539] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.876826] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b20a6aee-5d1c-4327-82a5-13806b9e2cc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.892560] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.892939] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 669.893737] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-710b45e7-f578-47fb-9950-97f5fc396867 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.905236] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 669.905236] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527882f2-1fa3-cd77-db3c-3ddd74567a3a" [ 669.905236] env[62914]: _type = "Task" [ 669.905236] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.915990] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527882f2-1fa3-cd77-db3c-3ddd74567a3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.932372] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfab9572-279d-42bf-8019-1de1551a7006 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.941251] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae069dd0-7619-4e0f-99fe-0503b92f0129 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.974810] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9f529d-dec5-45a0-ac9a-24e7a7ff7d3b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.987927] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb389f0e-dee1-4101-98a8-48a9e3b4daf6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.004505] env[62914]: DEBUG nova.compute.provider_tree [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.070808] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831474, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.112415] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831475, 'name': Rename_Task, 'duration_secs': 0.166047} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.112788] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 670.112953] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b90c774-980c-4d44-be7b-6bb0b0aa3b45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.120765] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 670.120765] env[62914]: value = "task-4831476" [ 670.120765] env[62914]: _type = "Task" [ 670.120765] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.131014] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.132676] env[62914]: WARNING nova.network.neutron [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] 9be47f79-b984-4fc2-a590-a80f36132ab1 already exists in list: networks containing: ['9be47f79-b984-4fc2-a590-a80f36132ab1']. 
ignoring it [ 670.164532] env[62914]: DEBUG nova.compute.manager [req-f380015d-3713-43f8-b169-91f18e2b6738 req-bce563bf-2c0a-49fd-9178-da533debe05a service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-vif-plugged-ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 670.164760] env[62914]: DEBUG oslo_concurrency.lockutils [req-f380015d-3713-43f8-b169-91f18e2b6738 req-bce563bf-2c0a-49fd-9178-da533debe05a service nova] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.164974] env[62914]: DEBUG oslo_concurrency.lockutils [req-f380015d-3713-43f8-b169-91f18e2b6738 req-bce563bf-2c0a-49fd-9178-da533debe05a service nova] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.165416] env[62914]: DEBUG oslo_concurrency.lockutils [req-f380015d-3713-43f8-b169-91f18e2b6738 req-bce563bf-2c0a-49fd-9178-da533debe05a service nova] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.165416] env[62914]: DEBUG nova.compute.manager [req-f380015d-3713-43f8-b169-91f18e2b6738 req-bce563bf-2c0a-49fd-9178-da533debe05a service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] No waiting events found dispatching network-vif-plugged-ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 670.165533] env[62914]: WARNING nova.compute.manager [req-f380015d-3713-43f8-b169-91f18e2b6738 req-bce563bf-2c0a-49fd-9178-da533debe05a service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received unexpected event network-vif-plugged-ec98f693-b488-485c-8165-c736ecc6b3d7 for instance with vm_state active and task_state None. 
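The "-events" lock lines above show how an external Neutron event (here network-vif-plugged) is popped under a per-instance lock and, because no waiter was registered, logged as unexpected and dropped. A minimal sketch of that bookkeeping, assuming a simple in-memory registry; this is illustrative, not Nova's InstanceEvents implementation:

import threading

class InstanceEventRegistry:
    """Toy stand-in for the per-instance event tracking seen in the log."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # instance_uuid -> {event_name: callback}

    def prepare_for_event(self, instance_uuid, event_name, callback):
        with self._lock:
            self._waiters.setdefault(instance_uuid, {})[event_name] = callback

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            callback = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if callback is None:
            # Matches the "Received unexpected event ..." warning above: nothing
            # was waiting for this event, so it is simply dropped.
            print("unexpected event %s for %s" % (event_name, instance_uuid))
        return callback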
[ 670.168138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Releasing lock "refresh_cache-934a0ca3-d879-4b23-90fe-2c190c201a88" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.168138] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Instance network_info: |[{"id": "ce7a44ed-a822-4d9c-ac68-4d421b3d5b23", "address": "fa:16:3e:f4:08:58", "network": {"id": "247af3ad-24a9-495b-bbb5-e1cb78f35739", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2062948421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d583bb937c140d39597825e78c54646", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce7a44ed-a8", "ovs_interfaceid": "ce7a44ed-a822-4d9c-ac68-4d421b3d5b23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 670.168138] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:08:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db68bd64-5b56-49af-a075-13dcf85cb2e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce7a44ed-a822-4d9c-ac68-4d421b3d5b23', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.175584] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Creating folder: Project (1d583bb937c140d39597825e78c54646). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.179083] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cfaed28-a8f5-4345-9dd7-e4b9e3e07e82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.192738] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Created folder: Project (1d583bb937c140d39597825e78c54646) in parent group-v941773. 
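The entry above reduces the full Neutron network_info blob to the short "Instance VIF info" list the VMware layer consumes (bridge, MAC, NSX logical-switch reference, port id, NIC model). A rough sketch of that mapping, assuming the dict layout shown in the log; illustrative only, not the nova.virt.vmwareapi code:

def vif_info_from_network_info(network_info, vif_model='vmxnet3'):
    vif_infos = []
    for vif in network_info:
        details = vif.get('details', {})
        vif_infos.append({
            'network_name': vif['network']['bridge'],          # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        })
    return vif_infos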
[ 670.192970] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Creating folder: Instances. Parent ref: group-v941860. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.193988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.194442] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-756b3653-8f78-4372-90eb-b34994901fec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.210197] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Created folder: Instances in parent group-v941860. [ 670.210553] env[62914]: DEBUG oslo.service.loopingcall [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.210772] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 670.211054] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ce26742-52ef-411d-b012-53c899ff1a94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.236918] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.236918] env[62914]: value = "task-4831479" [ 670.236918] env[62914]: _type = "Task" [ 670.236918] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.251385] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831479, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.271544] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.305298] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Start spawning the instance on the hypervisor. 
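The folder entries above follow a simple ordering: create (or reuse) a per-project folder, create an "Instances" child under it, then submit CreateVM_Task inside that folder and wait on the returned task. A sketch of that ordering, with a hypothetical invoke() standing in for the vSphere SOAP calls; not the real vm_util implementation:

def ensure_instance_folder(invoke, parent_folder_ref, project_id):
    # A real implementation also tolerates DuplicateName when the folders
    # already exist; this sketch only shows the creation order.
    project_ref = invoke('CreateFolder', parent_folder_ref,
                         name='Project (%s)' % project_id)
    return invoke('CreateFolder', project_ref, name='Instances')

def create_vm(invoke, instances_folder_ref, config_spec, resource_pool_ref):
    # Returns a task reference; the caller polls it until CreateVM_Task completes.
    return invoke('CreateVM_Task', instances_folder_ref,
                  config=config_spec, pool=resource_pool_ref)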
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 670.350825] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 670.351123] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 670.351303] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.351781] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 670.351781] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.351908] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 670.352410] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 670.352410] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 670.352410] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 670.352581] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 670.352959] env[62914]: DEBUG nova.virt.hardware [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 670.353684] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc47acea-828e-4b6f-96a8-be14a270bf4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.365613] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37f1c4c-8bec-4954-be5b-31b8fa4a11c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.418738] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527882f2-1fa3-cd77-db3c-3ddd74567a3a, 'name': SearchDatastore_Task, 'duration_secs': 0.011291} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.420665] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb21386a-f7f6-4ee2-a987-2e34132d11de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.427594] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 670.427594] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bd62ad-3699-36d7-c0c6-69f7de8c8eec" [ 670.427594] env[62914]: _type = "Task" [ 670.427594] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.439967] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bd62ad-3699-36d7-c0c6-69f7de8c8eec, 'name': SearchDatastore_Task} progress is 0%. 
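The hardware lines above walk from "Flavor limits 0:0:0" (no constraints) to "Got 1 possible topologies" for the 1-vCPU m1.nano flavor. The underlying enumeration amounts to listing (sockets, cores, threads) triples whose product equals the vCPU count, capped at 65536 each when no limits are set. A simplified version of that computation (not the real nova.virt.hardware code):

from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores, threads in product(divisors, repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_cpu_topologies(1))   # [(1, 1, 1)], matching the log above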
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.508837] env[62914]: DEBUG nova.scheduler.client.report [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.513910] env[62914]: INFO nova.compute.manager [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Rescuing [ 670.514267] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.514475] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.514702] env[62914]: DEBUG nova.network.neutron [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 670.576702] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831474, 'name': ReconfigVM_Task, 'duration_secs': 0.84346} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.576702] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Reconfigured VM instance instance-00000019 to attach disk [datastore1] d8d08c36-bec2-4117-9352-8e148d25dc9e/d8d08c36-bec2-4117-9352-8e148d25dc9e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 670.577435] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0405c435-9a03-4c1f-a7f6-7b879d716c8e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.586305] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 670.586305] env[62914]: value = "task-4831480" [ 670.586305] env[62914]: _type = "Task" [ 670.586305] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.598552] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831480, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.632727] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831476, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.749794] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831479, 'name': CreateVM_Task, 'duration_secs': 0.38719} completed successfully. 
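Every vCenter operation in this trace (SearchDatastore_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task, ...) follows the same wait_for_task pattern: submit the task, then poll it until it reports success or error, logging "progress is N%" along the way. A minimal sketch of that loop, with fetch_task_info() as a hypothetical stand-in for the property read the real poller performs:

import time

def wait_for_task(fetch_task_info, interval=0.5, timeout=300):
    """Poll fetch_task_info() until the task finishes or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()          # e.g. {'state': 'running', 'progress': 14}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Task still queued or running; report progress and try again, like the
        # "progress is N%" lines in the log.
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')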
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.750047] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 670.750836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.750938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.751357] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 670.751644] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d4e34e5-757a-4439-b95d-dce8376f409e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.757690] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 670.757690] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eda646-2eb3-5a02-e0bd-06e46dd50bad" [ 670.757690] env[62914]: _type = "Task" [ 670.757690] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.768741] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eda646-2eb3-5a02-e0bd-06e46dd50bad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.783524] env[62914]: DEBUG nova.network.neutron [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ec98f693-b488-485c-8165-c736ecc6b3d7", "address": "fa:16:3e:89:1a:24", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec98f693-b4", "ovs_interfaceid": "ec98f693-b488-485c-8165-c736ecc6b3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.924067] env[62914]: DEBUG nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Received event network-changed-770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 670.924248] env[62914]: DEBUG nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Refreshing instance network info cache due to event 
network-changed-770e30b2-2f05-4531-b9d0-6482b3d18b22. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 670.924480] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Acquiring lock "refresh_cache-1fa01184-1ed2-43de-bcbf-bd8658acc9f9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.924628] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Acquired lock "refresh_cache-1fa01184-1ed2-43de-bcbf-bd8658acc9f9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.924793] env[62914]: DEBUG nova.network.neutron [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Refreshing network info cache for port 770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 670.939475] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bd62ad-3699-36d7-c0c6-69f7de8c8eec, 'name': SearchDatastore_Task, 'duration_secs': 0.013208} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.939721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.939983] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bfdd7711-d081-42cf-9e4a-2df556d1b72e/bfdd7711-d081-42cf-9e4a-2df556d1b72e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 670.940300] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0c6d56c-7984-4e9a-9ae9-ed6bc7ebec6e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.950130] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 670.950130] env[62914]: value = "task-4831481" [ 670.950130] env[62914]: _type = "Task" [ 670.950130] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.960297] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831481, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.018086] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.740s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.018444] env[62914]: DEBUG nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 671.025620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 18.402s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.098995] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831480, 'name': Rename_Task, 'duration_secs': 0.27997} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.099393] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 671.099722] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d8789ce-8cc1-4459-b311-a6dde332a6f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.110415] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 671.110415] env[62914]: value = "task-4831482" [ 671.110415] env[62914]: _type = "Task" [ 671.110415] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.124162] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831482, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.134342] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831476, 'name': PowerOnVM_Task, 'duration_secs': 0.652384} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.134640] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 671.134866] env[62914]: INFO nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Took 9.09 seconds to spawn the instance on the hypervisor. [ 671.135072] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 671.135928] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09c538f-c935-46e5-8276-9f61af69434a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.273030] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eda646-2eb3-5a02-e0bd-06e46dd50bad, 'name': SearchDatastore_Task, 'duration_secs': 0.015932} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.273030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.273030] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.273030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.273030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.273030] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.273030] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d320312-ef3a-4923-9729-e18ca3a118a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.285027] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 671.285027] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 671.285027] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ca142dc-5a5b-4d52-81f7-c64db4c2fa6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.288565] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.289836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.290222] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.291153] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b74431-d896-4c62-b16e-cb6d294e869c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.299214] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 671.299214] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52120976-ba04-a6ba-1d70-8fed0ee8e61e" [ 671.299214] env[62914]: _type = "Task" [ 671.299214] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.313601] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 671.314517] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 671.315130] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.315503] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 671.317018] env[62914]: DEBUG nova.virt.hardware [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 671.325936] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Reconfiguring VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 671.336717] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcb6f4ae-8d78-40fa-b2b8-020d941ed4d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.349240] env[62914]: DEBUG nova.network.neutron [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Successfully updated port: 5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 671.364597] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52120976-ba04-a6ba-1d70-8fed0ee8e61e, 'name': SearchDatastore_Task, 'duration_secs': 0.0169} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.365142] env[62914]: DEBUG oslo_vmware.api [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 671.365142] env[62914]: value = "task-4831483" [ 671.365142] env[62914]: _type = "Task" [ 671.365142] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.369024] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db0d0e2f-e1b2-4902-bf22-4651024dca20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.376900] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 671.376900] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523b0611-93dc-3898-751a-9ee03292b659" [ 671.376900] env[62914]: _type = "Task" [ 671.376900] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.381725] env[62914]: DEBUG oslo_vmware.api [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831483, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.394847] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523b0611-93dc-3898-751a-9ee03292b659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.464986] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831481, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.480485] env[62914]: DEBUG nova.network.neutron [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Updating instance_info_cache with network_info: [{"id": "e42f6371-f854-4e39-ae20-c78d59217dbb", "address": "fa:16:3e:50:ed:dd", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape42f6371-f8", "ovs_interfaceid": "e42f6371-f854-4e39-ae20-c78d59217dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.528308] env[62914]: DEBUG nova.compute.utils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.530824] env[62914]: DEBUG nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Not allocating networking since 'none' was specified. 
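The image-cache entries in this stretch share one pattern: serialize on the cached VMDK path under devstack-image-cache_base, confirm it exists via SearchDatastore_Task, then copy it into the instance's own folder with CopyVirtualDisk_Task. A sketch of that control flow, assuming hypothetical helper callables for the datastore operations and a simplified lock in place of oslo_concurrency.lockutils:

from contextlib import contextmanager
import threading

_locks = {}

@contextmanager
def lock(name):
    # Simplified stand-in for a named lock; one lock object per cache path.
    lk = _locks.setdefault(name, threading.Lock())
    with lk:
        yield

def fetch_image_if_missing(cache_vmdk_path, instance_vmdk_path,
                           datastore_file_exists, download_image,
                           copy_virtual_disk):
    with lock(cache_vmdk_path):
        if not datastore_file_exists(cache_vmdk_path):   # SearchDatastore_Task
            download_image(cache_vmdk_path)              # only the first request pays this
    # Every instance then gets its own copy of the cached disk
    # (CopyVirtualDisk_Task in the log).
    copy_virtual_disk(cache_vmdk_path, instance_vmdk_path)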
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 671.563716] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.564275] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.564556] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.564861] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.565282] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.569092] env[62914]: INFO nova.compute.manager [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Terminating instance [ 671.581509] env[62914]: DEBUG nova.compute.manager [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Start destroying the instance on the hypervisor. 
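The terminate path that starts here (and continues in the entries below) is a two-step teardown: power the VM off with PowerOffVM_Task, then destroy it, tolerating the case where the VM is already off. An outline of that order of operations, with placeholder callables for the vSphere tasks; this is a sketch, not Nova's destroy() implementation:

def terminate_instance(power_off_vm, destroy_vm, wait_for_task):
    try:
        wait_for_task(power_off_vm())      # PowerOffVM_Task
    except Exception:
        # VM was already powered off (or the power-off raced with the guest);
        # continue with the destroy regardless.
        pass
    wait_for_task(destroy_vm())            # destroy/unregister the VM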
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 671.581789] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 671.583844] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc99f299-b98e-43f7-85e9-f8943f3ef295 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.595482] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 671.595889] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8a11c57-77d9-4306-93d4-54cb37c7a827 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.607708] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 671.607708] env[62914]: value = "task-4831484" [ 671.607708] env[62914]: _type = "Task" [ 671.607708] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.628384] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831482, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.637788] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831484, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.657761] env[62914]: INFO nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Took 34.31 seconds to build instance. [ 671.784722] env[62914]: DEBUG nova.network.neutron [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Updated VIF entry in instance network info cache for port 770e30b2-2f05-4531-b9d0-6482b3d18b22. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 671.785850] env[62914]: DEBUG nova.network.neutron [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Updating instance_info_cache with network_info: [{"id": "770e30b2-2f05-4531-b9d0-6482b3d18b22", "address": "fa:16:3e:7a:88:91", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap770e30b2-2f", "ovs_interfaceid": "770e30b2-2f05-4531-b9d0-6482b3d18b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.852730] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "refresh_cache-4fbb08f0-6712-4e78-b9da-b33a812ec9b7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.853320] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquired lock "refresh_cache-4fbb08f0-6712-4e78-b9da-b33a812ec9b7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.853455] env[62914]: DEBUG nova.network.neutron [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 671.876698] env[62914]: DEBUG nova.compute.manager [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Received event network-vif-plugged-ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 671.876934] env[62914]: DEBUG oslo_concurrency.lockutils [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] Acquiring lock "934a0ca3-d879-4b23-90fe-2c190c201a88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
671.877167] env[62914]: DEBUG oslo_concurrency.lockutils [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.877349] env[62914]: DEBUG oslo_concurrency.lockutils [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.877525] env[62914]: DEBUG nova.compute.manager [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] No waiting events found dispatching network-vif-plugged-ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 671.877703] env[62914]: WARNING nova.compute.manager [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Received unexpected event network-vif-plugged-ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 for instance with vm_state building and task_state spawning. [ 671.877866] env[62914]: DEBUG nova.compute.manager [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Received event network-changed-ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 671.878118] env[62914]: DEBUG nova.compute.manager [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Refreshing instance network info cache due to event network-changed-ce7a44ed-a822-4d9c-ac68-4d421b3d5b23. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 671.878339] env[62914]: DEBUG oslo_concurrency.lockutils [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] Acquiring lock "refresh_cache-934a0ca3-d879-4b23-90fe-2c190c201a88" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.878480] env[62914]: DEBUG oslo_concurrency.lockutils [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] Acquired lock "refresh_cache-934a0ca3-d879-4b23-90fe-2c190c201a88" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.878646] env[62914]: DEBUG nova.network.neutron [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Refreshing network info cache for port ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 671.889484] env[62914]: DEBUG oslo_vmware.api [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831483, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.900247] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523b0611-93dc-3898-751a-9ee03292b659, 'name': SearchDatastore_Task, 'duration_secs': 0.076028} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.900893] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.901394] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 934a0ca3-d879-4b23-90fe-2c190c201a88/934a0ca3-d879-4b23-90fe-2c190c201a88.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 671.901726] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2419853-06e7-40fa-9cff-da2ad965de30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.911095] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 671.911095] env[62914]: value = "task-4831485" [ 671.911095] env[62914]: _type = "Task" [ 671.911095] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.925486] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.965033] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.827036} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.965378] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bfdd7711-d081-42cf-9e4a-2df556d1b72e/bfdd7711-d081-42cf-9e4a-2df556d1b72e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 671.965568] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.965859] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31d68d91-da50-4367-bd78-31fc7fb44924 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.975534] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 671.975534] env[62914]: value = "task-4831486" [ 671.975534] env[62914]: _type = "Task" [ 671.975534] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.984147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "refresh_cache-6bdcd778-0942-41e7-a6fb-7c3413d34ef7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.992793] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.036373] env[62914]: DEBUG nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 672.125770] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831484, 'name': PowerOffVM_Task, 'duration_secs': 0.425012} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.131859] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 672.132122] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 672.132429] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831482, 'name': PowerOnVM_Task, 'duration_secs': 0.785584} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.132948] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54d028bb-8d1f-4289-9a52-aa7b12550d41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.134800] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 672.134904] env[62914]: INFO nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Took 13.02 seconds to spawn the instance on the hypervisor. 
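The PowerOffVM_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method through the session, then poll the returned Task object until it completes (the "Waiting for the task: (returnval)" and "_poll_task ... progress is N%" lines). A minimal sketch of that pattern follows; the vCenter host, credentials and the way the VM reference is looked up are placeholders for illustration, not the actual Nova vm_util code.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical connection values; in this deployment they come from
    # nova.conf ([vmware] host_ip / host_username / host_password).
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Grab the first VirtualMachine the PropertyCollector returns, purely
    # for illustration (Nova instead resolves the moref by instance uuid).
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'VirtualMachine', 1, ['name'])
    vm_ref = retrieve_result.objects[0].obj

    # PowerOffVM_Task returns a Task managed object; wait_for_task() polls
    # it and logs progress, which is what produces the
    # "_poll_task ... progress is N%" records in this log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)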
[ 672.135055] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 672.136544] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a138b256-2941-4726-b9d7-2d36227943c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.163782] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.830s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.164739] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b4e7bd-6063-4042-9bf0-c3bf3cca415a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.175228] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a02b07d-f21b-42c6-8dbb-7db48a6561ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.213870] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a2df76-14b9-4f10-93f7-533e3026027c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.219718] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 672.219938] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 672.220152] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Deleting the datastore file [datastore1] 1ddb6508-d8fb-4ead-bcb0-370c19bb287d {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 672.222815] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-480c01c3-e621-460a-a055-e70e8907f4d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.227158] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc6967f-2fd8-4c82-9a58-d30db533d032 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.243551] env[62914]: DEBUG nova.compute.provider_tree [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.246583] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for the task: (returnval){ [ 672.246583] env[62914]: value = "task-4831488" [ 672.246583] env[62914]: _type = "Task" [ 672.246583] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.256970] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831488, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.266648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquiring lock "e69c36e9-3c59-48e3-9962-ffe8de10a789" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.267156] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.267414] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquiring lock "e69c36e9-3c59-48e3-9962-ffe8de10a789-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.269599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.269793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.272123] env[62914]: INFO nova.compute.manager [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Terminating instance [ 672.274423] env[62914]: DEBUG nova.compute.manager [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 672.274649] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 672.274890] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a284e4f-6aeb-4ff9-91e8-c88f1a4e347e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.282560] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 672.282560] env[62914]: value = "task-4831489" [ 672.282560] env[62914]: _type = "Task" [ 672.282560] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.288385] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Releasing lock "refresh_cache-1fa01184-1ed2-43de-bcbf-bd8658acc9f9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.288708] env[62914]: DEBUG nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Received event network-vif-plugged-d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 672.288991] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Acquiring lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.289309] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.289536] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.289788] env[62914]: DEBUG nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] No waiting events found dispatching network-vif-plugged-d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 672.290064] env[62914]: WARNING nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Received unexpected event network-vif-plugged-d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 for instance with vm_state building and task_state spawning. [ 672.290315] env[62914]: DEBUG nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Received event network-changed-d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 672.290498] env[62914]: DEBUG nova.compute.manager [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Refreshing instance network info cache due to event network-changed-d1bc2a9f-2310-438f-a8f0-d1e1f60ef641. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 672.290695] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Acquiring lock "refresh_cache-bfdd7711-d081-42cf-9e4a-2df556d1b72e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.290839] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Acquired lock "refresh_cache-bfdd7711-d081-42cf-9e4a-2df556d1b72e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.291265] env[62914]: DEBUG nova.network.neutron [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Refreshing network info cache for port d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.297020] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.385247] env[62914]: DEBUG oslo_vmware.api [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831483, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.394197] env[62914]: DEBUG nova.network.neutron [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.433690] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831485, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.490704] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108454} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.491026] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.491930] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d20095-4d22-483b-9e7b-28e6e1f03d4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.525967] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] bfdd7711-d081-42cf-9e4a-2df556d1b72e/bfdd7711-d081-42cf-9e4a-2df556d1b72e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.529953] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e957b870-2f22-44f4-8e6f-6cdac3da1539 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.556825] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 672.557904] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c206e925-0b1f-414a-ab13-915fac820fec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.568676] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the 
task: (returnval){ [ 672.568676] env[62914]: value = "task-4831491" [ 672.568676] env[62914]: _type = "Task" [ 672.568676] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.570276] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 672.570276] env[62914]: value = "task-4831490" [ 672.570276] env[62914]: _type = "Task" [ 672.570276] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.589410] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831490, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.594201] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.666476] env[62914]: INFO nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Took 35.37 seconds to build instance. [ 672.672689] env[62914]: DEBUG nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 672.685436] env[62914]: DEBUG nova.network.neutron [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Updating instance_info_cache with network_info: [{"id": "5c562670-f8c1-48c4-9630-586f87930b56", "address": "fa:16:3e:ad:13:49", "network": {"id": "c35736c6-adee-491c-b310-41ace5afa0e1", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-838617194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "956b73f20dbc4c2187528d03ea975e02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c562670-f8", "ovs_interfaceid": "5c562670-f8c1-48c4-9630-586f87930b56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.748319] env[62914]: DEBUG nova.scheduler.client.report [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.764403] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831488, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.794231] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831489, 'name': PowerOffVM_Task, 'duration_secs': 0.379601} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.794987] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 672.794987] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 672.794987] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941785', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'name': 'volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e69c36e9-3c59-48e3-9962-ffe8de10a789', 'attached_at': '', 'detached_at': '', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'serial': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 672.795806] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5479596-303e-40bb-b05e-0a8096d3bac6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.821790] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c6b80f-b2b4-4af9-b674-ba26fa4c31a3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.829814] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5349fd-a1b6-4f12-a166-6fe83f51d541 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.854023] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a67c68-082e-4ee2-b58e-96ca93ad7afb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.872868] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] The volume has not been displaced from its original location: [datastore1] volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd/volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 672.878477] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Reconfiguring VM instance instance-0000000c to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 672.884635] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae91fea2-5811-47c2-a55c-a72c9dc39ad1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.905106] env[62914]: DEBUG oslo_vmware.api [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831483, 'name': ReconfigVM_Task, 'duration_secs': 1.300542} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.906302] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.906631] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Reconfigured VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 672.910428] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 672.910428] env[62914]: value = "task-4831492" [ 672.910428] env[62914]: _type = "Task" [ 672.910428] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.926133] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831492, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.933422] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.844444} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.934620] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 934a0ca3-d879-4b23-90fe-2c190c201a88/934a0ca3-d879-4b23-90fe-2c190c201a88.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 672.934849] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.935142] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-475bb11a-e88b-4375-a217-e2effdd1bf17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.944326] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 672.944326] env[62914]: value = "task-4831493" [ 672.944326] env[62914]: _type = "Task" [ 672.944326] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.957640] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.051647] env[62914]: DEBUG nova.network.neutron [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Updated VIF entry in instance network info cache for port ce7a44ed-a822-4d9c-ac68-4d421b3d5b23. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 673.052150] env[62914]: DEBUG nova.network.neutron [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Updating instance_info_cache with network_info: [{"id": "ce7a44ed-a822-4d9c-ac68-4d421b3d5b23", "address": "fa:16:3e:f4:08:58", "network": {"id": "247af3ad-24a9-495b-bbb5-e1cb78f35739", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2062948421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d583bb937c140d39597825e78c54646", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce7a44ed-a8", "ovs_interfaceid": "ce7a44ed-a822-4d9c-ac68-4d421b3d5b23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.061802] env[62914]: DEBUG nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 673.071106] env[62914]: DEBUG nova.network.neutron [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Updated VIF entry in instance network info cache for port d1bc2a9f-2310-438f-a8f0-d1e1f60ef641. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 673.071106] env[62914]: DEBUG nova.network.neutron [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Updating instance_info_cache with network_info: [{"id": "d1bc2a9f-2310-438f-a8f0-d1e1f60ef641", "address": "fa:16:3e:9c:fb:2a", "network": {"id": "24f7a334-5551-4a4c-9d56-f67e363a98ac", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1901190492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dd71bf518024821931bb9add9996d4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1bc2a9f-23", "ovs_interfaceid": "d1bc2a9f-2310-438f-a8f0-d1e1f60ef641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.087713] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831491, 'name': PowerOffVM_Task, 'duration_secs': 0.413081} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.092374] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 673.093079] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831490, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.096443] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f70e140-f330-4fab-85bc-f1955c537eca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.123313] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 673.123699] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 673.123899] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 673.124113] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 673.124275] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 673.124433] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 673.124672] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 673.124861] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 
tempest-ServerShowV257Test-2042433145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 673.125065] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 673.125199] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 673.125372] env[62914]: DEBUG nova.virt.hardware [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 673.126345] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50ba3fd-244e-4db2-b67e-197932b1f50e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.130502] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4591db0-313a-4b4f-adf5-aa10e403e3ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.142962] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750b6a94-6d53-4585-b217-c0ad2f6ba404 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.161898] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 673.169244] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Creating folder: Project (9cb64b10c45d4024a178ad65d8ba56e0). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 673.172132] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.882s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.172391] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86749cf5-9935-4a42-a28e-51e5ef9fca4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.182626] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 673.182889] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b14ee1b-a1f0-45a2-82eb-e71e88ad75aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.186179] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Created folder: Project (9cb64b10c45d4024a178ad65d8ba56e0) in parent group-v941773. [ 673.186400] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Creating folder: Instances. Parent ref: group-v941863. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 673.187014] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26733165-7989-4376-8c1d-30785fe4402f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.192601] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Releasing lock "refresh_cache-4fbb08f0-6712-4e78-b9da-b33a812ec9b7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.192918] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Instance network_info: |[{"id": "5c562670-f8c1-48c4-9630-586f87930b56", "address": "fa:16:3e:ad:13:49", "network": {"id": "c35736c6-adee-491c-b310-41ace5afa0e1", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-838617194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "956b73f20dbc4c2187528d03ea975e02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c562670-f8", "ovs_interfaceid": "5c562670-f8c1-48c4-9630-586f87930b56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 673.193604] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 673.193604] env[62914]: value = "task-4831495" [ 673.193604] env[62914]: _type = "Task" [ 673.193604] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.193969] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:13:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c562670-f8c1-48c4-9630-586f87930b56', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 673.202937] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Creating folder: Project (956b73f20dbc4c2187528d03ea975e02). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 673.205884] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.206498] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86a9695c-f8d0-4331-9868-bd7cf87523eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.211089] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Created folder: Instances in parent group-v941863. [ 673.211344] env[62914]: DEBUG oslo.service.loopingcall [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 673.212081] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 673.212948] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94b0709e-00bb-40f6-8949-f3095b3c2004 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.230935] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 673.231326] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.234034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.234034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.234034] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.234034] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47d217a1-e2e0-4f5f-ac7e-a5fa41569e84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.236688] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.236688] env[62914]: value = "task-4831498" [ 673.236688] env[62914]: _type = "Task" [ 673.236688] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.242532] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Created folder: Project (956b73f20dbc4c2187528d03ea975e02) in parent group-v941773. 
[ 673.242763] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Creating folder: Instances. Parent ref: group-v941865. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 673.243560] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8a732bc-63ee-4965-95e2-76a5c0dc16f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.250282] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831498, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.262323] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.262532] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 673.267897] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4769e39e-3055-4405-bda3-213a0d514268 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.270432] env[62914]: DEBUG oslo_vmware.api [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Task: {'id': task-4831488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.579506} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.270692] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Created folder: Instances in parent group-v941865. [ 673.270915] env[62914]: DEBUG oslo.service.loopingcall [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 673.271508] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.271722] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 673.271928] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 673.272130] env[62914]: INFO nova.compute.manager [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Took 1.69 seconds to destroy the instance on the hypervisor. [ 673.272364] env[62914]: DEBUG oslo.service.loopingcall [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 673.272542] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 673.273168] env[62914]: DEBUG nova.compute.manager [-] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 673.273287] env[62914]: DEBUG nova.network.neutron [-] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.275083] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74695ef5-b55a-447e-a6c8-0ce99a92469c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.292527] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 673.292527] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e336a1-fa12-7706-76e6-26bd790bd398" [ 673.292527] env[62914]: _type = "Task" [ 673.292527] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.300262] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.300262] env[62914]: value = "task-4831500" [ 673.300262] env[62914]: _type = "Task" [ 673.300262] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.305049] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e336a1-fa12-7706-76e6-26bd790bd398, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.316052] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831500, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.422883] env[62914]: DEBUG oslo_concurrency.lockutils [None req-80e0be51-cb4d-430a-8fff-db17373a0b04 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-6fd5f3b8-1175-4bd5-b0b4-12517ba65271-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.971s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.431623] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831492, 'name': ReconfigVM_Task, 'duration_secs': 0.223927} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.432711] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Reconfigured VM instance instance-0000000c to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 673.438354] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eda04c7-dbb4-4e87-9406-03ce782b2c5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.461292] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.151138} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.461292] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.461503] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 673.461503] env[62914]: value = "task-4831501" [ 673.461503] env[62914]: _type = "Task" [ 673.461503] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.462101] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ba457e-9f22-40fe-ba35-f76606eb5734 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.475185] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831501, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.494318] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 934a0ca3-d879-4b23-90fe-2c190c201a88/934a0ca3-d879-4b23-90fe-2c190c201a88.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.495096] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8ee87f8-61a6-4c86-8693-2f6ada064a70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.516804] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 673.516804] env[62914]: value = "task-4831502" [ 673.516804] env[62914]: _type = "Task" [ 673.516804] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.528398] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831502, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.555513] env[62914]: DEBUG oslo_concurrency.lockutils [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] Releasing lock "refresh_cache-934a0ca3-d879-4b23-90fe-2c190c201a88" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.555797] env[62914]: DEBUG nova.compute.manager [req-1adb933f-3140-42e5-91d6-24a1910eed38 req-c0427835-7ac2-4328-9e84-3129982cac37 service nova] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Received event network-vif-deleted-917f7d2d-3256-481e-9892-13779b20ab0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 673.570820] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.571155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.571444] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.571673] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.571928] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.574717] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bf95ae3-4ab9-4b15-9f3c-3d2a474187ec req-68c1aa54-266b-43ed-b0ff-970964253782 service nova] Releasing lock "refresh_cache-bfdd7711-d081-42cf-9e4a-2df556d1b72e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.575866] env[62914]: INFO nova.compute.manager [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 
tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Terminating instance [ 673.578568] env[62914]: DEBUG nova.compute.manager [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 673.578790] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 673.583315] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31499089-a7ff-4aa0-858c-889c02cfbdc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.594602] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831490, 'name': ReconfigVM_Task, 'duration_secs': 0.884693} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.597341] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Reconfigured VM instance instance-0000001b to attach disk [datastore2] bfdd7711-d081-42cf-9e4a-2df556d1b72e/bfdd7711-d081-42cf-9e4a-2df556d1b72e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.598220] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 673.598610] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd36fd53-769c-496e-9c0b-bda50eb7759e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.600902] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c595e92-39b3-4e57-8ff3-e46b15cfa5b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.609904] env[62914]: DEBUG oslo_vmware.api [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 673.609904] env[62914]: value = "task-4831504" [ 673.609904] env[62914]: _type = "Task" [ 673.609904] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.612069] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 673.612069] env[62914]: value = "task-4831503" [ 673.612069] env[62914]: _type = "Task" [ 673.612069] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.633687] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831503, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.637059] env[62914]: DEBUG oslo_vmware.api [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831504, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.677456] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 673.750208] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831498, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.773133] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.748s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.776191] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.531s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.776417] env[62914]: DEBUG nova.objects.instance [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lazy-loading 'resources' on Instance uuid e1018767-71e4-49c9-bd4d-02eae39dc26b {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 673.807046] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e336a1-fa12-7706-76e6-26bd790bd398, 'name': SearchDatastore_Task, 'duration_secs': 0.049428} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.812668] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4c98a50-101a-480a-9261-48eed2c1e468 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.819907] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 673.819907] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a71aea-aaca-a3b2-7e51-d866c7d0df1c" [ 673.819907] env[62914]: _type = "Task" [ 673.819907] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.825196] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831500, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.836465] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a71aea-aaca-a3b2-7e51-d866c7d0df1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.978026] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831501, 'name': ReconfigVM_Task, 'duration_secs': 0.267639} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.978473] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941785', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'name': 'volume-713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e69c36e9-3c59-48e3-9962-ffe8de10a789', 'attached_at': '', 'detached_at': '', 'volume_id': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd', 'serial': '713ab20a-101e-495b-8fb5-6ebb8c0e42dd'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 673.978859] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 673.979876] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73606ab3-db5a-4402-a92e-8006a09b0f02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.988668] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 673.989126] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c86db95-1621-4ed2-ad9e-ddc6e53c1ba0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.030287] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831502, 'name': ReconfigVM_Task, 'duration_secs': 0.466702} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.030628] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 934a0ca3-d879-4b23-90fe-2c190c201a88/934a0ca3-d879-4b23-90fe-2c190c201a88.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.031347] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1c3f421-30a4-4877-b379-8e730fb508c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.041659] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 674.041659] env[62914]: value = "task-4831506" [ 674.041659] env[62914]: _type = "Task" [ 674.041659] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.052969] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831506, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.064830] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 674.064996] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 674.065271] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Deleting the datastore file [datastore1] e69c36e9-3c59-48e3-9962-ffe8de10a789 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.065480] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a334fc6c-ddb1-4ca7-86c6-9cba230768ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.074170] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for the task: (returnval){ [ 674.074170] env[62914]: value = "task-4831507" [ 674.074170] env[62914]: _type = "Task" [ 674.074170] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.084975] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831507, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.127117] env[62914]: DEBUG oslo_vmware.api [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831504, 'name': PowerOffVM_Task, 'duration_secs': 0.26447} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.130603] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 674.130813] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 674.131559] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831503, 'name': Rename_Task, 'duration_secs': 0.253293} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.131895] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96126395-b0fe-45d1-a2fb-370b054b2ed4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.133645] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 674.133934] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6d33035-de77-4293-812e-72483c9aa2d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.143832] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 674.143832] env[62914]: value = "task-4831508" [ 674.143832] env[62914]: _type = "Task" [ 674.143832] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.156527] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831508, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.201583] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.204261] env[62914]: DEBUG nova.network.neutron [-] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.212796] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 674.213170] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 674.213454] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore1] 70a6d3e7-6928-47a7-9f7f-bd5dad64912f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.213835] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-393cd4be-b604-46f9-8156-541ef404c631 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.223782] env[62914]: DEBUG oslo_vmware.api [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 674.223782] env[62914]: value = "task-4831510" [ 674.223782] env[62914]: _type = "Task" [ 674.223782] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.233482] env[62914]: DEBUG oslo_vmware.api [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.247291] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831498, 'name': CreateVM_Task, 'duration_secs': 0.622777} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.247494] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 674.247947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.248130] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.248491] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 674.248756] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86cdb30d-33a7-42d0-b152-80b750b2aa40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.255733] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 674.255733] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526e4855-ab86-ebc7-3ea6-c7c9a53094e5" [ 674.255733] env[62914]: _type = "Task" [ 674.255733] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.265629] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526e4855-ab86-ebc7-3ea6-c7c9a53094e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.326188] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831500, 'name': CreateVM_Task, 'duration_secs': 0.577505} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.333679] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 674.334826] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.335023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.335406] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 674.336295] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a39f16ec-80ed-4519-9025-167a84902004 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.343199] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a71aea-aaca-a3b2-7e51-d866c7d0df1c, 'name': SearchDatastore_Task, 'duration_secs': 0.021625} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.345312] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.345637] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. 
{{(pid=62914) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 674.345998] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 674.345998] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528a83a9-a87a-48f9-61db-29177054130e" [ 674.345998] env[62914]: _type = "Task" [ 674.345998] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.346415] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-089bf878-4e42-4603-8eac-fcb7b83f42c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.360030] env[62914]: INFO nova.scheduler.client.report [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleted allocation for migration 2ccde65d-d838-4157-a87e-deeb37150b7b [ 674.367989] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 674.367989] env[62914]: value = "task-4831511" [ 674.367989] env[62914]: _type = "Task" [ 674.367989] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.372179] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528a83a9-a87a-48f9-61db-29177054130e, 'name': SearchDatastore_Task, 'duration_secs': 0.013254} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.378996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.379303] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 674.379530] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.379699] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.379885] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 674.380517] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8da63d46-d4af-4612-b331-39e84623bfc6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.389351] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831511, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.391575] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 674.391575] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 674.392246] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e51eb05-4394-4fd0-9fbe-77ee2a0da19a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.399546] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 674.399546] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205f16b-a6ac-f132-5c09-6869e70fa710" [ 674.399546] env[62914]: _type = "Task" [ 674.399546] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.416026] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205f16b-a6ac-f132-5c09-6869e70fa710, 'name': SearchDatastore_Task, 'duration_secs': 0.012918} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.420743] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffba4e58-cc72-407a-a168-f02c91c47a2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.429787] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 674.429787] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529da80b-4641-20cc-bbdd-3abbaed15641" [ 674.429787] env[62914]: _type = "Task" [ 674.429787] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.446087] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529da80b-4641-20cc-bbdd-3abbaed15641, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.476604] env[62914]: DEBUG nova.compute.manager [req-247a2ac7-37db-49cc-8657-efd78110068c req-ca196965-9b8e-4df1-8436-cc30d18365ba service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Received event network-vif-plugged-5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 674.476604] env[62914]: DEBUG oslo_concurrency.lockutils [req-247a2ac7-37db-49cc-8657-efd78110068c req-ca196965-9b8e-4df1-8436-cc30d18365ba service nova] Acquiring lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.477149] env[62914]: DEBUG oslo_concurrency.lockutils [req-247a2ac7-37db-49cc-8657-efd78110068c req-ca196965-9b8e-4df1-8436-cc30d18365ba service nova] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.477149] env[62914]: DEBUG oslo_concurrency.lockutils [req-247a2ac7-37db-49cc-8657-efd78110068c req-ca196965-9b8e-4df1-8436-cc30d18365ba service nova] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.477149] env[62914]: DEBUG nova.compute.manager [req-247a2ac7-37db-49cc-8657-efd78110068c req-ca196965-9b8e-4df1-8436-cc30d18365ba service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] No waiting events found dispatching network-vif-plugged-5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 674.477239] env[62914]: WARNING nova.compute.manager [req-247a2ac7-37db-49cc-8657-efd78110068c req-ca196965-9b8e-4df1-8436-cc30d18365ba service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Received unexpected event network-vif-plugged-5c562670-f8c1-48c4-9630-586f87930b56 for instance with vm_state building and task_state spawning. [ 674.558589] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831506, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.590245] env[62914]: DEBUG oslo_vmware.api [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Task: {'id': task-4831507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205938} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.590562] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 674.590762] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 674.591033] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 674.591258] env[62914]: INFO nova.compute.manager [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Took 2.32 seconds to destroy the instance on the hypervisor. [ 674.591565] env[62914]: DEBUG oslo.service.loopingcall [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.591815] env[62914]: DEBUG nova.compute.manager [-] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 674.591936] env[62914]: DEBUG nova.network.neutron [-] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.645276] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.645520] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.647461] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "4496a977-30b2-4323-a561-884633958cdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.647713] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "4496a977-30b2-4323-a561-884633958cdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.659173] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831508, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.710552] env[62914]: INFO nova.compute.manager [-] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Took 1.44 seconds to deallocate network for instance. [ 674.744122] env[62914]: DEBUG oslo_vmware.api [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309977} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.744122] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 674.744122] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 674.744122] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 674.744122] env[62914]: INFO nova.compute.manager [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 674.744122] env[62914]: DEBUG oslo.service.loopingcall [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.744122] env[62914]: DEBUG nova.compute.manager [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 674.744122] env[62914]: DEBUG nova.network.neutron [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.750177] env[62914]: DEBUG nova.compute.manager [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-changed-ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 674.750396] env[62914]: DEBUG nova.compute.manager [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Refreshing instance network info cache due to event network-changed-ec98f693-b488-485c-8165-c736ecc6b3d7. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 674.750639] env[62914]: DEBUG oslo_concurrency.lockutils [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] Acquiring lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.750786] env[62914]: DEBUG oslo_concurrency.lockutils [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] Acquired lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.750967] env[62914]: DEBUG nova.network.neutron [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Refreshing network info cache for port ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 674.778606] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526e4855-ab86-ebc7-3ea6-c7c9a53094e5, 'name': SearchDatastore_Task, 'duration_secs': 0.029498} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.784964] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.785479] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 674.785895] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.786165] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.786490] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 674.788379] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-439efed5-fe8f-4dea-ba1c-9447e7f1c389 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.809483] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 674.811414] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 674.811414] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2c56537-5f11-41a5-8a44-7c0c3dd97974 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.821368] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 674.821368] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523b4e1f-0b33-0f2c-126f-7ed04382f465" [ 674.821368] env[62914]: _type = "Task" [ 674.821368] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.835580] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523b4e1f-0b33-0f2c-126f-7ed04382f465, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.867606] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55729805-fb85-4d14-abce-1c2ac9b56cdb tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 26.269s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.888987] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831511, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.919526] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab0a9dd-91f5-4e6a-9276-2317d8410bee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.930760] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08388f0-7af9-4c43-9cac-b0ada6ca6e57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.946765] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529da80b-4641-20cc-bbdd-3abbaed15641, 'name': SearchDatastore_Task, 'duration_secs': 0.013579} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.975095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.975380] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4fbb08f0-6712-4e78-b9da-b33a812ec9b7/4fbb08f0-6712-4e78-b9da-b33a812ec9b7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 674.976426] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfee8c4f-c46b-4e62-ae99-44b3a7449894 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.979480] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05426c60-54ef-4012-814a-fba685f00603 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.988542] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 674.988542] env[62914]: value = "task-4831512" [ 674.988542] env[62914]: _type = "Task" [ 674.988542] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.995252] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c304c787-2adb-4229-85c6-f4fac8449be3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.013941] env[62914]: DEBUG nova.compute.provider_tree [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.019173] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.054297] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831506, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.152783] env[62914]: INFO nova.compute.manager [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Detaching volume 1ee7830b-c356-46cb-bbb5-755b85e54338 [ 675.159659] env[62914]: DEBUG oslo_vmware.api [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831508, 'name': PowerOnVM_Task, 'duration_secs': 0.678518} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.159870] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 675.160178] env[62914]: INFO nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Took 10.36 seconds to spawn the instance on the hypervisor. 
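The dominant pattern in the trace above is task polling: wait_for_task submits a vCenter task (CopyVirtualDisk_Task, PowerOnVM_Task, Rename_Task, ...), _poll_task logs its progress until it reports "completed successfully", and the elapsed time is recorded as duration_secs. A minimal, self-contained Python sketch of that loop follows; it is not the oslo.vmware implementation, and TaskInfo, get_task_info and the 0.5 s poll interval are hypothetical stand-ins used only for illustration.

import time
from dataclasses import dataclass
from typing import Callable

@dataclass
class TaskInfo:
    # Hypothetical stand-in for a vSphere TaskInfo object.
    state: str         # "running", "success" or "error"
    progress: int      # percent complete, as echoed in the log ("progress is 25%")
    error: str = ""

def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> float:
    """Poll a task until it finishes, mirroring the wait_for_task/_poll_task
    lines above; returns the elapsed time (the log's duration_secs)."""
    started = time.monotonic()
    while True:
        info = get_task_info()                    # one polling round trip
        if info.state == "success":
            return time.monotonic() - started     # "... completed successfully"
        if info.state == "error":
            raise RuntimeError(info.error)
        print(f"progress is {info.progress}%")    # e.g. "CopyVirtualDisk_Task} progress is 25%"
        time.sleep(poll_interval)

Wired to a callback that queries the real task object, the loop reproduces the intermediate progress lines and the final duration_secs figure that appear throughout this section.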
[ 675.160444] env[62914]: DEBUG nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 675.161286] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec47571-1c3b-4e28-ad92-a232926a89b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.213385] env[62914]: INFO nova.virt.block_device [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Attempting to driver detach volume 1ee7830b-c356-46cb-bbb5-755b85e54338 from mountpoint /dev/sdb [ 675.218155] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 675.218155] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941851', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'name': 'volume-1ee7830b-c356-46cb-bbb5-755b85e54338', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494', 'attached_at': '', 'detached_at': '', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'serial': '1ee7830b-c356-46cb-bbb5-755b85e54338'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 675.218155] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afea9fc-eb5d-496e-9642-b56fb065a552 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.223632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.248638] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5e5486-6b60-4b80-b794-1894f2d8c73b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.258417] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bb703d-2247-4a39-9594-483f9d6c1c9d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.285169] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-814be0a9-cd75-47d8-96cc-3b48cb4a7324 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.308562] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] The volume has not been displaced from its original location: [datastore1] volume-1ee7830b-c356-46cb-bbb5-755b85e54338/volume-1ee7830b-c356-46cb-bbb5-755b85e54338.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 675.314252] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Reconfiguring VM instance instance-00000009 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 675.317795] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbfdcb5e-0e54-47e1-8e53-5b43a5427f3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.355278] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Waiting for the task: (returnval){ [ 675.355278] env[62914]: value = "task-4831513" [ 675.355278] env[62914]: _type = "Task" [ 675.355278] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.355944] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523b4e1f-0b33-0f2c-126f-7ed04382f465, 'name': SearchDatastore_Task, 'duration_secs': 0.077245} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.361191] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96102098-7f77-449d-bdb3-8953f655b464 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.374799] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 675.374799] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524997ed-384a-73fc-723b-1a9a4baf3f70" [ 675.374799] env[62914]: _type = "Task" [ 675.374799] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.376660] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831513, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.396638] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524997ed-384a-73fc-723b-1a9a4baf3f70, 'name': SearchDatastore_Task, 'duration_secs': 0.016844} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.397412] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831511, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742796} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.397813] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.398154] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 675.398492] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. 
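The cache-hit spawn path traced here reduces to datastore path handling plus tasks in a fixed order: SearchDatastore_Task to confirm the image is present in devstack-image-cache_base, CopyVirtualDisk_Task to copy the cached VMDK into the instance folder, and (for the root disk) ExtendVirtualDisk_Task. The sketch below condenses that ordering under stated assumptions: ds_path, copy_disk and extend_disk are hypothetical helpers for illustration, not the nova.virt.vmwareapi functions named in the log.

from typing import Callable

def ds_path(datastore: str, *parts: str) -> str:
    # Datastore paths in the trace look like
    # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
    return f"[{datastore}] " + "/".join(parts)

def spawn_disk_from_cached_image(datastore: str, image_id: str, instance_uuid: str,
                                 new_root_size: int,
                                 copy_disk: Callable[[str, str], None],
                                 extend_disk: Callable[[str, int], None]) -> str:
    """Condensed, hypothetical version of the cache-hit flow in this section."""
    cached = ds_path(datastore, "devstack-image-cache_base",
                     image_id, f"{image_id}.vmdk")
    target = ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")
    copy_disk(cached, target)            # CopyVirtualDisk_Task in the log
    extend_disk(target, new_root_size)   # ExtendVirtualDisk_Task, size as logged ("Extending root virtual disk to 1048576")
    return target

The rescue copy at the end of the preceding entry follows the same shape, only with a "<image-id>-rescue.vmdk" target inside the instance folder instead of a fresh root disk.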
[ 675.398883] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aea19360-e022-44f3-9711-ea9a3ac03e17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.402298] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de350f42-128a-47f8-b13d-10f3e60e20b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.436105] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.439343] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72852f38-ab78-4374-9e37-715ece6ea92b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.454710] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 675.454710] env[62914]: value = "task-4831514" [ 675.454710] env[62914]: _type = "Task" [ 675.454710] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.469231] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 675.469231] env[62914]: value = "task-4831515" [ 675.469231] env[62914]: _type = "Task" [ 675.469231] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.480768] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831514, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.491079] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831515, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.505431] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831512, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.523855] env[62914]: DEBUG nova.scheduler.client.report [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.541877] env[62914]: DEBUG nova.compute.manager [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Received event network-changed-5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 675.541877] env[62914]: DEBUG nova.compute.manager [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Refreshing instance network info cache due to event network-changed-5c562670-f8c1-48c4-9630-586f87930b56. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 675.542342] env[62914]: DEBUG oslo_concurrency.lockutils [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] Acquiring lock "refresh_cache-4fbb08f0-6712-4e78-b9da-b33a812ec9b7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.542342] env[62914]: DEBUG oslo_concurrency.lockutils [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] Acquired lock "refresh_cache-4fbb08f0-6712-4e78-b9da-b33a812ec9b7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.542342] env[62914]: DEBUG nova.network.neutron [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Refreshing network info cache for port 5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 675.556799] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831506, 'name': Rename_Task, 'duration_secs': 1.171877} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.560030] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 675.560030] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfa41b95-d1d8-44e4-92f4-5d9dbdfcb6ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.568201] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 675.568201] env[62914]: value = "task-4831516" [ 675.568201] env[62914]: _type = "Task" [ 675.568201] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.580138] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.694227] env[62914]: INFO nova.compute.manager [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Took 30.63 seconds to build instance. [ 675.723934] env[62914]: DEBUG nova.network.neutron [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updated VIF entry in instance network info cache for port ec98f693-b488-485c-8165-c736ecc6b3d7. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 675.724703] env[62914]: DEBUG nova.network.neutron [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ec98f693-b488-485c-8165-c736ecc6b3d7", "address": "fa:16:3e:89:1a:24", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec98f693-b4", "ovs_interfaceid": "ec98f693-b488-485c-8165-c736ecc6b3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.845855] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.846388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] 
Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.846724] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.847105] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.847395] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.850355] env[62914]: INFO nova.compute.manager [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Terminating instance [ 675.853534] env[62914]: DEBUG nova.compute.manager [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 675.853642] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 675.854826] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618c61fc-c73d-41b3-b26b-88706fa72c98 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.872863] env[62914]: DEBUG nova.network.neutron [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.875241] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831513, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.879808] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 675.880534] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91294025-0d0c-4ad2-8a96-625d104110d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.960186] env[62914]: DEBUG nova.network.neutron [-] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.984767] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.989164] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831514, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.991950] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 675.992497] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 675.992868] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Deleting the datastore file [datastore2] 9ce44ae9-9369-4c0c-9d14-9c8fde42d612 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 675.994282] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb7ca49c-8f96-4552-8e16-3c403916e3dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.016444] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851943} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.016444] env[62914]: DEBUG oslo_vmware.api [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 676.016444] env[62914]: value = "task-4831518" [ 676.016444] env[62914]: _type = "Task" [ 676.016444] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.016879] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4fbb08f0-6712-4e78-b9da-b33a812ec9b7/4fbb08f0-6712-4e78-b9da-b33a812ec9b7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 676.016879] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.017375] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1fb4f374-4202-43e0-838c-9183e2a82266 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.028489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.037633] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.583s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.038216] env[62914]: DEBUG nova.objects.instance [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lazy-loading 'resources' on Instance uuid 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 676.040663] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 676.040663] env[62914]: value = "task-4831519" [ 676.040663] env[62914]: _type = "Task" [ 676.040663] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.041164] env[62914]: DEBUG oslo_vmware.api [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.057247] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831519, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.088625] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831516, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.089704] env[62914]: INFO nova.scheduler.client.report [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Deleted allocations for instance e1018767-71e4-49c9-bd4d-02eae39dc26b [ 676.105333] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "5a704020-921e-4ede-9fd9-b745c027a158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.106579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "5a704020-921e-4ede-9fd9-b745c027a158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.197734] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c32b41be-f73b-48f5-8112-89175ee4f0db tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.775s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.229388] env[62914]: DEBUG oslo_concurrency.lockutils [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] Releasing lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.229630] env[62914]: DEBUG nova.compute.manager [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Received event 
network-changed-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 676.229857] env[62914]: DEBUG nova.compute.manager [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Refreshing instance network info cache due to event network-changed-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 676.230190] env[62914]: DEBUG oslo_concurrency.lockutils [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] Acquiring lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.231744] env[62914]: DEBUG oslo_concurrency.lockutils [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] Acquired lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.231744] env[62914]: DEBUG nova.network.neutron [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Refreshing network info cache for port eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 676.379508] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831513, 'name': ReconfigVM_Task, 'duration_secs': 0.550918} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.380296] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Reconfigured VM instance instance-00000009 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 676.388161] env[62914]: INFO nova.compute.manager [-] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Took 1.64 seconds to deallocate network for instance. [ 676.389106] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c879f0b3-890c-4d90-822d-12cee2d98759 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.417404] env[62914]: DEBUG nova.network.neutron [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Updated VIF entry in instance network info cache for port 5c562670-f8c1-48c4-9630-586f87930b56. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 676.418452] env[62914]: DEBUG nova.network.neutron [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Updating instance_info_cache with network_info: [{"id": "5c562670-f8c1-48c4-9630-586f87930b56", "address": "fa:16:3e:ad:13:49", "network": {"id": "c35736c6-adee-491c-b310-41ace5afa0e1", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-838617194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "956b73f20dbc4c2187528d03ea975e02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c562670-f8", "ovs_interfaceid": "5c562670-f8c1-48c4-9630-586f87930b56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.422289] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Waiting for the task: (returnval){ [ 676.422289] env[62914]: value = "task-4831520" [ 676.422289] env[62914]: _type = "Task" [ 676.422289] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.440757] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831520, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.471353] env[62914]: INFO nova.compute.manager [-] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Took 1.88 seconds to deallocate network for instance. [ 676.482705] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831514, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.019154} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.483558] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 676.483877] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.484383] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85416913-6704-4bd9-8ba4-122e91da6acb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.490650] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831515, 'name': ReconfigVM_Task, 'duration_secs': 0.772328} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.492695] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.492870] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e765b47d-2348-43de-a0bd-8b70282566de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.497254] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 676.497254] env[62914]: value = "task-4831521" [ 676.497254] env[62914]: _type = "Task" [ 676.497254] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.530682] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3ff247c-0e22-475e-bb84-b0d32ad6795f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.550158] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831521, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.559166] env[62914]: DEBUG oslo_vmware.api [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.537172} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.561350] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 676.561629] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 676.561836] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 676.562088] env[62914]: INFO nova.compute.manager [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Took 0.71 seconds to destroy the instance on the hypervisor. [ 676.562415] env[62914]: DEBUG oslo.service.loopingcall [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 676.562785] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 676.562785] env[62914]: value = "task-4831522" [ 676.562785] env[62914]: _type = "Task" [ 676.562785] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.566412] env[62914]: DEBUG nova.compute.manager [-] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 676.566551] env[62914]: DEBUG nova.network.neutron [-] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 676.568557] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134179} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.572055] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.575023] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e189214e-b291-4036-aecc-d14bb58ce90a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.610222] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 4fbb08f0-6712-4e78-b9da-b33a812ec9b7/4fbb08f0-6712-4e78-b9da-b33a812ec9b7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.610639] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831522, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.613865] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22abf417-8663-4264-b811-78ab76c10afe tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "e1018767-71e4-49c9-bd4d-02eae39dc26b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.359s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.620194] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4794debf-0ca2-428e-bc3c-75bf4d86536b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.636757] env[62914]: DEBUG oslo_vmware.api [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831516, 'name': PowerOnVM_Task, 'duration_secs': 0.885709} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.640230] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 676.640230] env[62914]: INFO nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Took 9.06 seconds to spawn the instance on the hypervisor. [ 676.640230] env[62914]: DEBUG nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 676.640803] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85c93b7-59c0-40b4-9fc6-872eaf35652b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.647715] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 676.647715] env[62914]: value = "task-4831523" [ 676.647715] env[62914]: _type = "Task" [ 676.647715] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.669850] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831523, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.701126] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 676.925335] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.926025] env[62914]: DEBUG oslo_concurrency.lockutils [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] Releasing lock "refresh_cache-4fbb08f0-6712-4e78-b9da-b33a812ec9b7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.926439] env[62914]: DEBUG nova.compute.manager [req-81d2dfd6-e998-4528-a260-a1f94462ef47 req-a5723d18-f616-4164-892d-4af6c0ad9f1c service nova] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Received event network-vif-deleted-0bb7c6b3-8c55-48e5-9a15-6c4eb11009ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 676.937285] env[62914]: DEBUG oslo_vmware.api [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Task: {'id': task-4831520, 'name': ReconfigVM_Task, 'duration_secs': 0.212456} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.939823] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941851', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'name': 'volume-1ee7830b-c356-46cb-bbb5-755b85e54338', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494', 'attached_at': '', 'detached_at': '', 'volume_id': '1ee7830b-c356-46cb-bbb5-755b85e54338', 'serial': '1ee7830b-c356-46cb-bbb5-755b85e54338'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 677.013459] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831521, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109272} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.013942] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.015974] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa9395c-5911-436f-96bd-90807ce9c7af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.042959] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.046012] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34e5b0d6-5b1d-4312-9b94-bd0649fbcf87 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.068896] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 677.068896] env[62914]: value = "task-4831524" [ 677.068896] env[62914]: _type = "Task" [ 677.068896] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.079813] env[62914]: INFO nova.compute.manager [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Took 0.61 seconds to detach 1 volumes for instance. [ 677.088875] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.089064] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831524, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.089759] env[62914]: DEBUG nova.compute.manager [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Deleting volume: 713ab20a-101e-495b-8fb5-6ebb8c0e42dd {{(pid=62914) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 677.173169] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831523, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.179136] env[62914]: INFO nova.compute.manager [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Took 32.08 seconds to build instance. [ 677.213738] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a1994b-8be4-4437-9274-2387d3a04231 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.223731] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b79ee73-3867-4c37-ab32-71dd8a0d9be5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.262146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.263030] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41330a8e-37b2-4285-9eca-88fdb9fc758b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.272527] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137b0e8d-00e0-4c05-a732-7a9063d2dfa8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.290110] env[62914]: DEBUG nova.compute.provider_tree [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.336327] env[62914]: DEBUG nova.compute.manager [req-1739963d-bb4d-483c-989d-df880bbc76bf req-4549ec54-8a5e-4952-8e9a-ee5588209e9c service nova] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Received event network-vif-deleted-90cb4968-f2bd-4e77-9d1a-d66dcdf73599 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 677.342480] env[62914]: DEBUG nova.network.neutron [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 
service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Updated VIF entry in instance network info cache for port eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 677.342865] env[62914]: DEBUG nova.network.neutron [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Updating instance_info_cache with network_info: [{"id": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "address": "fa:16:3e:5f:37:96", "network": {"id": "e34551c0-a033-4c11-8d79-17366dafd005", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1105594559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c4d14e64cb240d9816b0677dc020110", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf21f1d-ed", "ovs_interfaceid": "eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.510232] env[62914]: DEBUG nova.objects.instance [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lazy-loading 'flavor' on Instance uuid a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 677.592915] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831524, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.593181] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831522, 'name': ReconfigVM_Task, 'duration_secs': 0.617932} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.597023] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 677.599234] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f646d49-990a-4f3c-8cad-41083246de5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.609335] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 677.609335] env[62914]: value = "task-4831526" [ 677.609335] env[62914]: _type = "Task" [ 677.609335] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.622044] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831526, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.664609] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.664952] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831523, 'name': ReconfigVM_Task, 'duration_secs': 0.952146} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.665238] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 4fbb08f0-6712-4e78-b9da-b33a812ec9b7/4fbb08f0-6712-4e78-b9da-b33a812ec9b7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.665882] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21c77b81-f2a4-4a82-8c24-0180e736ab6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.677311] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 677.677311] env[62914]: value = "task-4831527" [ 677.677311] env[62914]: _type = "Task" [ 677.677311] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.683854] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61b7b355-e01d-4404-9375-7b1b336af26a tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.871s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.695586] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831527, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.767339] env[62914]: DEBUG nova.network.neutron [-] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.793820] env[62914]: DEBUG nova.scheduler.client.report [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.848225] env[62914]: DEBUG oslo_concurrency.lockutils [req-4b7d9644-dd47-42c3-9897-fe78f90a974c req-af85c76c-e67d-400e-abd6-66fc171a97c0 service nova] Releasing lock "refresh_cache-82aab17d-a6d0-48cf-a59a-fbef7d402894" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.034252] env[62914]: DEBUG nova.compute.manager [req-1b40fbb3-940e-424f-a61e-33cd686cd615 req-9c72c101-31c2-4116-a45b-3b24a8d8b633 service nova] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Received event network-vif-deleted-fc7b353a-564b-4bbe-b0e1-85f5f54f7092 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 678.034569] env[62914]: DEBUG nova.compute.manager [req-1b40fbb3-940e-424f-a61e-33cd686cd615 req-9c72c101-31c2-4116-a45b-3b24a8d8b633 service nova] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Received event network-vif-deleted-99005c2d-b79b-4aba-b30d-613274dad233 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 678.089789] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831524, 'name': ReconfigVM_Task, 'duration_secs': 0.755099} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.090222] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.090986] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1499046-8da4-4b94-abb6-e39cffcc50e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.098554] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 678.098554] env[62914]: value = "task-4831528" [ 678.098554] env[62914]: _type = "Task" [ 678.098554] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.103511] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "d8d08c36-bec2-4117-9352-8e148d25dc9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.103737] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.103988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "d8d08c36-bec2-4117-9352-8e148d25dc9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.104294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.104542] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.107313] env[62914]: INFO nova.compute.manager [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Terminating instance [ 678.112838] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831528, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.113559] env[62914]: DEBUG nova.compute.manager [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 678.113764] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 678.118261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f317ee10-939f-475f-909a-10c5f35d00ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.127799] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831526, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.130782] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 678.131207] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a38b8e03-c624-4b43-b8ae-0773b9c93d8f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.139408] env[62914]: DEBUG oslo_vmware.api [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 678.139408] env[62914]: value = "task-4831529" [ 678.139408] env[62914]: _type = "Task" [ 678.139408] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.153335] env[62914]: DEBUG oslo_vmware.api [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831529, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.190467] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 678.193869] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831527, 'name': Rename_Task, 'duration_secs': 0.24806} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.193869] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 678.194821] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06ca5dc5-ec2e-49f6-867e-20e1ebf4676d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.204230] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 678.204230] env[62914]: value = "task-4831530" [ 678.204230] env[62914]: _type = "Task" [ 678.204230] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.215401] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.273511] env[62914]: INFO nova.compute.manager [-] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Took 1.70 seconds to deallocate network for instance. 
[ 678.303441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.265s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.307045] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.525s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.309043] env[62914]: INFO nova.compute.claims [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.349360] env[62914]: INFO nova.scheduler.client.report [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Deleted allocations for instance 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb [ 678.358312] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-6fd5f3b8-1175-4bd5-b0b4-12517ba65271-ec98f693-b488-485c-8165-c736ecc6b3d7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.358611] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-6fd5f3b8-1175-4bd5-b0b4-12517ba65271-ec98f693-b488-485c-8165-c736ecc6b3d7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.520866] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1187763a-65ff-442a-928d-265deaa2fe65 tempest-VolumesAssistedSnapshotsTest-5850821 tempest-VolumesAssistedSnapshotsTest-5850821-project-admin] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.875s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.612039] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831528, 'name': Rename_Task, 'duration_secs': 0.247391} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.612039] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 678.612236] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac57ff30-ef7a-45d2-8602-ad829f591275 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.626025] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 678.626025] env[62914]: value = "task-4831531" [ 678.626025] env[62914]: _type = "Task" [ 678.626025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.626258] env[62914]: DEBUG oslo_vmware.api [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831526, 'name': PowerOnVM_Task, 'duration_secs': 0.91443} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.626574] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 678.632488] env[62914]: DEBUG nova.compute.manager [None req-9f8c8640-7306-4286-b56a-8e73896366ab tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 678.633300] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef90636-9517-4609-a1d8-bd7769df1f29 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.654227] env[62914]: DEBUG oslo_vmware.api [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831529, 'name': PowerOffVM_Task, 'duration_secs': 0.332205} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.654696] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 678.654907] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 678.655278] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f68e2c4-5838-421d-8ca5-c456bfb7eddf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.716804] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831530, 'name': PowerOnVM_Task} progress is 79%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.722317] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.728294] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 678.728802] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 678.728883] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleting the datastore file [datastore1] d8d08c36-bec2-4117-9352-8e148d25dc9e {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 678.729525] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc52a9e-fcf7-499a-91a9-5139647ab74f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.737171] env[62914]: DEBUG oslo_vmware.api [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 678.737171] 
env[62914]: value = "task-4831533" [ 678.737171] env[62914]: _type = "Task" [ 678.737171] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.746271] env[62914]: DEBUG oslo_vmware.api [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831533, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.756464] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "interface-934a0ca3-d879-4b23-90fe-2c190c201a88-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.756737] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "interface-934a0ca3-d879-4b23-90fe-2c190c201a88-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.757171] env[62914]: DEBUG nova.objects.instance [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lazy-loading 'flavor' on Instance uuid 934a0ca3-d879-4b23-90fe-2c190c201a88 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 678.781060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.864520] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.864734] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.865704] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1224c3c5-9a41-407f-80f5-a3ec372f0a0e tempest-FloatingIPsAssociationNegativeTestJSON-815623881 tempest-FloatingIPsAssociationNegativeTestJSON-815623881-project-member] Lock "2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.188s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
678.867501] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8625d9d-aff6-4bf9-b3a2-0c6cb7d1a7f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.888834] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01eeabcd-c84e-4ec4-8312-c1741ffa2b25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.923092] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Reconfiguring VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 678.923990] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-043e0e94-bcf0-41a0-82b7-a8d6ab1e153e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.948008] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 678.948008] env[62914]: value = "task-4831534" [ 678.948008] env[62914]: _type = "Task" [ 678.948008] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.958556] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.148255] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831531, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.221672] env[62914]: DEBUG oslo_vmware.api [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831530, 'name': PowerOnVM_Task, 'duration_secs': 0.843722} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.222038] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 679.222333] env[62914]: INFO nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Took 8.92 seconds to spawn the instance on the hypervisor. 
[ 679.223081] env[62914]: DEBUG nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 679.223485] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3d50cd-6558-47a4-a0ae-1a0627eac806 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.255974] env[62914]: DEBUG oslo_vmware.api [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343615} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.256351] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.256590] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 679.256936] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 679.257271] env[62914]: INFO nova.compute.manager [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 679.257625] env[62914]: DEBUG oslo.service.loopingcall [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.257964] env[62914]: DEBUG nova.compute.manager [-] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 679.258116] env[62914]: DEBUG nova.network.neutron [-] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 679.262318] env[62914]: DEBUG nova.objects.instance [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lazy-loading 'pci_requests' on Instance uuid 934a0ca3-d879-4b23-90fe-2c190c201a88 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 679.462150] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.646345] env[62914]: DEBUG oslo_vmware.api [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831531, 'name': PowerOnVM_Task, 'duration_secs': 0.700456} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.646653] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 679.647109] env[62914]: INFO nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Took 6.58 seconds to spawn the instance on the hypervisor. [ 679.647154] env[62914]: DEBUG nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 679.647969] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1321b95-295f-4926-8975-446722413c58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.757849] env[62914]: INFO nova.compute.manager [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Took 34.65 seconds to build instance. 
[ 679.768774] env[62914]: DEBUG nova.objects.base [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Object Instance<934a0ca3-d879-4b23-90fe-2c190c201a88> lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 679.771722] env[62914]: DEBUG nova.network.neutron [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 679.947341] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31941fda-12c2-4b1f-8bf7-f3301f8df139 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.964551] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e816406-f758-4538-9ebc-9a2f80ab1710 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.969321] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.972858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9200694b-a5e7-4ac4-b137-0129d609791b tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "interface-934a0ca3-d879-4b23-90fe-2c190c201a88-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.216s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.006176] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614e4008-7c5c-4eb8-906e-547b683dd87e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.016683] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf277c92-46b2-4f25-8905-9b95c9aec996 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.034088] env[62914]: DEBUG nova.compute.provider_tree [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.177017] env[62914]: INFO nova.compute.manager [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Took 29.56 seconds to build instance. 
[ 680.263140] env[62914]: DEBUG oslo_concurrency.lockutils [None req-18a4963e-d977-42d5-bb3f-d6a7ea966b32 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.588s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.358121] env[62914]: DEBUG nova.network.neutron [-] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.475374] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.537267] env[62914]: DEBUG nova.scheduler.client.report [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.683030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43e8e77f-5a05-4364-ae64-aa66de13da94 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.083s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.861175] env[62914]: INFO nova.compute.manager [-] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Took 1.60 seconds to deallocate network for instance. [ 680.966710] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.045500] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.740s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.046087] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 681.049755] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.318s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.050034] env[62914]: DEBUG nova.objects.instance [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lazy-loading 'resources' on Instance uuid 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 681.369297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.470449] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.558091] env[62914]: DEBUG nova.compute.utils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.563172] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 681.563620] env[62914]: DEBUG nova.network.neutron [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 681.656594] env[62914]: DEBUG nova.policy [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '684696f5ee584fe69c7d2bcef638dd79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '500bdabe48614a18bf0868e232ee91cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 681.673128] env[62914]: DEBUG nova.compute.manager [req-d6380ceb-a679-4440-b903-a83f72a89481 req-18ca0339-772a-4853-8603-23808fee15cf service nova] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Received event network-vif-deleted-3a09d962-f1f2-4390-8d9e-9856c75ba69f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 681.979713] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.066283] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 682.160745] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a34eba-0d76-4d72-a515-e1f565d90caf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.173247] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83ae6eb-9fdc-4aea-808a-7ec8f164fb12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.212744] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0182a7d6-058a-4d85-9536-83bbaa54aac0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.222514] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6125d9-84e8-4671-8c75-ef4dab45916f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.243278] env[62914]: DEBUG nova.compute.provider_tree [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.261942] env[62914]: DEBUG nova.network.neutron [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Successfully created port: b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.449187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "ff2cff97-1671-4f97-8f69-532253169ff8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.449727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "ff2cff97-1671-4f97-8f69-532253169ff8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.450168] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "ff2cff97-1671-4f97-8f69-532253169ff8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.450684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "ff2cff97-1671-4f97-8f69-532253169ff8-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.450767] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "ff2cff97-1671-4f97-8f69-532253169ff8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.456468] env[62914]: INFO nova.compute.manager [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Terminating instance [ 682.465746] env[62914]: DEBUG nova.compute.manager [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 682.466156] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 682.466977] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ce1223-e3a7-41f7-b535-dab0b9d4dbfd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.477559] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.480783] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 682.481516] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-452c2890-e7e2-430a-91cb-7d62b1a021c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.495477] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "1342d15d-fbef-4709-adf6-f827bc13d3ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.495756] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.497182] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 682.497182] env[62914]: value = "task-4831536" [ 682.497182] env[62914]: _type = "Task" [ 682.497182] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.509639] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.748901] env[62914]: DEBUG nova.scheduler.client.report [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.970947] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.998349] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 683.013213] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831536, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.076212] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 683.109909] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.110213] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.110400] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.110595] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.114093] env[62914]: DEBUG nova.virt.hardware [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.114093] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e78ec2-c2ef-4959-8a22-1115a6e9e342 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.123110] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b57601-c333-4e09-ae64-94397d261d8d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.257202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.259814] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.030s 
{{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.260138] env[62914]: DEBUG nova.objects.instance [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lazy-loading 'resources' on Instance uuid ef521e82-38ab-4d62-b434-da7f7fa8c50f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 683.284366] env[62914]: INFO nova.scheduler.client.report [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted allocations for instance 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1 [ 683.286345] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "8b83f82b-42f7-4f33-abc4-ff278d343309" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.286562] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.287732] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "934a0ca3-d879-4b23-90fe-2c190c201a88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.287907] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.288112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "934a0ca3-d879-4b23-90fe-2c190c201a88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.288305] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.288505] env[62914]: DEBUG 
oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.293503] env[62914]: INFO nova.compute.manager [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Terminating instance [ 683.297183] env[62914]: DEBUG nova.compute.manager [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 683.297602] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 683.299065] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270241d0-ff61-4bfd-906c-181e852cfb3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.309569] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 683.309569] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85a960b3-cae3-465e-a9ea-82c495844037 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.318031] env[62914]: DEBUG oslo_vmware.api [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 683.318031] env[62914]: value = "task-4831537" [ 683.318031] env[62914]: _type = "Task" [ 683.318031] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.331439] env[62914]: DEBUG oslo_vmware.api [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.473125] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.515171] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831536, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.533294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.587429] env[62914]: INFO nova.compute.manager [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Rebuilding instance [ 683.635495] env[62914]: DEBUG nova.compute.manager [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 683.636469] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6234a7f-f19b-4f65-ab77-d88112dbf94c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.657066] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.657066] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.657066] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.657066] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.657066] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.661020] env[62914]: INFO nova.compute.manager [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Terminating instance [ 683.661020] env[62914]: DEBUG nova.compute.manager [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 683.661020] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 683.661498] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef075b66-6b7f-4ed1-8fb8-9bdcf9ad561e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.672736] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 683.673181] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e35b8d0c-5ad9-424b-9411-888fce8bab50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.685469] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 683.685469] env[62914]: value = "task-4831538" [ 683.685469] env[62914]: _type = "Task" [ 683.685469] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.694360] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831538, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.795549] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 683.804309] env[62914]: DEBUG oslo_concurrency.lockutils [None req-793300a6-bead-43e8-b1d6-0d2da6b00639 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "6a9c973f-8aea-4403-9fa2-d37e5eec1ee1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.174s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.829945] env[62914]: DEBUG oslo_vmware.api [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831537, 'name': PowerOffVM_Task, 'duration_secs': 0.339004} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.833868] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 683.834087] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 683.834595] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b68af39-bfc7-4843-9285-fbdb315c5bb0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.916413] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 683.916650] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 683.916847] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Deleting the datastore file [datastore2] 934a0ca3-d879-4b23-90fe-2c190c201a88 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.920123] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-cce63b4a-2cec-4e9f-af2d-c0b5c6b9458e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.930178] env[62914]: DEBUG oslo_vmware.api [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for the task: (returnval){ [ 683.930178] env[62914]: value = "task-4831540" [ 683.930178] env[62914]: _type = "Task" [ 683.930178] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.937491] env[62914]: DEBUG oslo_vmware.api [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.977711] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.015137] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831536, 'name': PowerOffVM_Task, 'duration_secs': 1.281299} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.015449] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 684.015616] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 684.015874] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32922cbd-11bd-46e5-ab4a-4677a10fbda8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.040083] env[62914]: DEBUG nova.network.neutron [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Successfully updated port: b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 684.106094] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 684.106417] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 684.106644] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Deleting the datastore file [datastore2] ff2cff97-1671-4f97-8f69-532253169ff8 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 684.109676] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db93adf0-24e0-4890-b1cf-f641ae9717cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.119697] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for the task: (returnval){ [ 684.119697] env[62914]: value = "task-4831542" [ 684.119697] env[62914]: _type = "Task" [ 684.119697] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.131190] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831542, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.147598] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 684.147921] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94b5b911-b819-492e-aa35-8776be01184a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.160357] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 684.160357] env[62914]: value = "task-4831543" [ 684.160357] env[62914]: _type = "Task" [ 684.160357] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.175103] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.197201] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831538, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.277272] env[62914]: DEBUG nova.compute.manager [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 684.278263] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ead109-5205-46ac-9bf9-7a32ed5a1c24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.304597] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21160a15-0236-4d7d-a9e3-189e96b60971 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.321929] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d71d3f-bd36-4667-aaf1-a0d098e505cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.332197] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.357537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.357836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.358061] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.358257] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.360484] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.360957] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7f9e46-a970-48cb-9da6-ce74c8b03946 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.365057] env[62914]: INFO nova.compute.manager [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Terminating instance [ 684.372515] env[62914]: DEBUG nova.compute.manager [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 684.372733] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 684.374202] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60815165-3ca8-419c-a5d2-c292bd763074 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.380092] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68c8de4-ef9e-4e7d-841c-4ffe671623b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.399605] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 684.400254] env[62914]: DEBUG nova.compute.provider_tree [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.401692] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b631343-c37c-4193-a65a-5e1e77a73154 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.410884] env[62914]: DEBUG oslo_vmware.api [None req-53bfaca2-4902-4486-b436-3bab427ff788 
tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 684.410884] env[62914]: value = "task-4831544" [ 684.410884] env[62914]: _type = "Task" [ 684.410884] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.424594] env[62914]: DEBUG oslo_vmware.api [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.439803] env[62914]: DEBUG oslo_vmware.api [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Task: {'id': task-4831540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281014} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.440230] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.440327] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 684.440479] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 684.440709] env[62914]: INFO nova.compute.manager [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Took 1.14 seconds to destroy the instance on the hypervisor. [ 684.440972] env[62914]: DEBUG oslo.service.loopingcall [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 684.441245] env[62914]: DEBUG nova.compute.manager [-] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 684.441343] env[62914]: DEBUG nova.network.neutron [-] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 684.477783] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.544073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "refresh_cache-567f3d61-ed30-47d9-aebc-77c9392be506" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.544073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquired lock "refresh_cache-567f3d61-ed30-47d9-aebc-77c9392be506" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.544292] env[62914]: DEBUG nova.network.neutron [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 684.631839] env[62914]: DEBUG oslo_vmware.api [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Task: {'id': task-4831542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198379} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.632055] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.632310] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 684.632455] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 684.632649] env[62914]: INFO nova.compute.manager [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Took 2.17 seconds to destroy the instance on the hypervisor. [ 684.632900] env[62914]: DEBUG oslo.service.loopingcall [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 684.633138] env[62914]: DEBUG nova.compute.manager [-] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 684.633237] env[62914]: DEBUG nova.network.neutron [-] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 684.672113] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831543, 'name': PowerOffVM_Task, 'duration_secs': 0.178983} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.672591] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 684.672939] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 684.674053] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546e3214-5909-4e7a-8ffe-fac1cc2879dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.683588] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 684.683966] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e54aa50a-289a-4327-9ee2-0cc79a2ca4bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.696280] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831538, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.721072] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 684.721072] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 684.721356] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Deleting the datastore file [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 684.721773] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bfe99e8-7089-4486-865b-9a28bf5551a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.731933] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 684.731933] env[62914]: value = "task-4831546" [ 684.731933] env[62914]: _type = "Task" [ 684.731933] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.744860] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831546, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.791946] env[62914]: INFO nova.compute.manager [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] instance snapshotting [ 684.792661] env[62914]: DEBUG nova.objects.instance [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'flavor' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 684.808514] env[62914]: DEBUG nova.compute.manager [req-f15b86a0-542e-4837-9e4f-a8538ced286d req-fb25a689-ff9e-4a74-9b3c-eac2dedaa126 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Received event network-vif-plugged-b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 684.808850] env[62914]: DEBUG oslo_concurrency.lockutils [req-f15b86a0-542e-4837-9e4f-a8538ced286d req-fb25a689-ff9e-4a74-9b3c-eac2dedaa126 service nova] Acquiring lock "567f3d61-ed30-47d9-aebc-77c9392be506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.809240] env[62914]: DEBUG oslo_concurrency.lockutils [req-f15b86a0-542e-4837-9e4f-a8538ced286d req-fb25a689-ff9e-4a74-9b3c-eac2dedaa126 service nova] Lock "567f3d61-ed30-47d9-aebc-77c9392be506-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.809492] env[62914]: DEBUG oslo_concurrency.lockutils [req-f15b86a0-542e-4837-9e4f-a8538ced286d req-fb25a689-ff9e-4a74-9b3c-eac2dedaa126 service nova] Lock "567f3d61-ed30-47d9-aebc-77c9392be506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.809819] env[62914]: DEBUG nova.compute.manager [req-f15b86a0-542e-4837-9e4f-a8538ced286d req-fb25a689-ff9e-4a74-9b3c-eac2dedaa126 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] No waiting events found dispatching network-vif-plugged-b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 684.810456] env[62914]: WARNING nova.compute.manager [req-f15b86a0-542e-4837-9e4f-a8538ced286d req-fb25a689-ff9e-4a74-9b3c-eac2dedaa126 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Received unexpected event network-vif-plugged-b84edfae-0f54-42b6-99a8-f767333d7360 for instance with vm_state building and task_state spawning. 
[ 684.905810] env[62914]: DEBUG nova.scheduler.client.report [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.927619] env[62914]: DEBUG oslo_vmware.api [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831544, 'name': PowerOffVM_Task, 'duration_secs': 0.211013} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.927934] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 684.928162] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 684.928385] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-236d3f4f-d79b-40fc-8ea2-ec137e3827d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.977830] env[62914]: DEBUG oslo_vmware.api [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831534, 'name': ReconfigVM_Task, 'duration_secs': 5.801112} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.977830] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.977830] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Reconfigured VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 685.023430] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 685.023430] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 685.023619] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Deleting the datastore file [datastore2] 4fbb08f0-6712-4e78-b9da-b33a812ec9b7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.025936] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-103dc6a7-ee80-497e-b8ee-4d88b818da4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.036274] env[62914]: DEBUG oslo_vmware.api [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for the task: (returnval){ [ 685.036274] env[62914]: value = "task-4831548" [ 685.036274] env[62914]: _type = "Task" [ 685.036274] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.044594] env[62914]: DEBUG oslo_vmware.api [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.086483] env[62914]: DEBUG nova.network.neutron [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.198649] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831538, 'name': PowerOffVM_Task, 'duration_secs': 1.115448} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.198959] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 685.199449] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 685.199759] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d117ca3d-32d7-4434-b8b1-888c4b689278 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.247373] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302428} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.247768] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.248027] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.248236] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.270843] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 685.275023] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 685.275023] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Deleting the datastore file [datastore1] a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.275023] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95ad628a-a3e4-4ee0-8ccc-1fbd8cbbbd82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.278643] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for the task: (returnval){ [ 685.278643] env[62914]: value = "task-4831550" [ 685.278643] env[62914]: _type = "Task" [ 685.278643] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.291930] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.299695] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ea48b8-8516-4e0a-9761-f544e2e6fe71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.333886] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f680c8e0-3c53-4585-8f75-268bb70c9ca2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.400731] env[62914]: DEBUG nova.network.neutron [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Updating instance_info_cache with network_info: [{"id": "b84edfae-0f54-42b6-99a8-f767333d7360", "address": "fa:16:3e:27:79:be", "network": {"id": "74cef65b-589b-4ed3-aae9-6518635699bf", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-420307571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "500bdabe48614a18bf0868e232ee91cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb84edfae-0f", "ovs_interfaceid": "b84edfae-0f54-42b6-99a8-f767333d7360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.411914] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.414442] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.536s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.416138] env[62914]: INFO nova.compute.claims [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.451017] env[62914]: DEBUG nova.network.neutron [-] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Updating 
instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.466540] env[62914]: INFO nova.scheduler.client.report [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Deleted allocations for instance ef521e82-38ab-4d62-b434-da7f7fa8c50f [ 685.553925] env[62914]: DEBUG oslo_vmware.api [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Task: {'id': task-4831548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.477517} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.554344] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.554404] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.554551] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.554730] env[62914]: INFO nova.compute.manager [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 685.554976] env[62914]: DEBUG oslo.service.loopingcall [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.555195] env[62914]: DEBUG nova.compute.manager [-] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 685.556212] env[62914]: DEBUG nova.network.neutron [-] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 685.601133] env[62914]: DEBUG nova.network.neutron [-] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.790981] env[62914]: DEBUG oslo_vmware.api [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Task: {'id': task-4831550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220572} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.792485] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.792700] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.792886] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.793081] env[62914]: INFO nova.compute.manager [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Took 2.13 seconds to destroy the instance on the hypervisor. [ 685.793340] env[62914]: DEBUG oslo.service.loopingcall [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.796228] env[62914]: DEBUG nova.compute.manager [-] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 685.796344] env[62914]: DEBUG nova.network.neutron [-] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 685.798302] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "4cea2bd1-a238-4fb6-bc47-719894461228" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.798554] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.846045] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 685.846394] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0d19b501-c3ac-4ec6-af68-5e06b8afb7be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.855590] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 685.855590] env[62914]: value = "task-4831551" [ 685.855590] env[62914]: _type = "Task" [ 685.855590] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.867111] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831551, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.903778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Releasing lock "refresh_cache-567f3d61-ed30-47d9-aebc-77c9392be506" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.904125] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Instance network_info: |[{"id": "b84edfae-0f54-42b6-99a8-f767333d7360", "address": "fa:16:3e:27:79:be", "network": {"id": "74cef65b-589b-4ed3-aae9-6518635699bf", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-420307571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "500bdabe48614a18bf0868e232ee91cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb84edfae-0f", "ovs_interfaceid": "b84edfae-0f54-42b6-99a8-f767333d7360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 685.904563] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:79:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b84edfae-0f54-42b6-99a8-f767333d7360', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.913402] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Creating folder: Project (500bdabe48614a18bf0868e232ee91cc). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 685.913738] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99179fce-6b54-49f6-b544-3256d7a7a6ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.926292] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Created folder: Project (500bdabe48614a18bf0868e232ee91cc) in parent group-v941773. [ 685.926672] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Creating folder: Instances. Parent ref: group-v941869. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 685.927212] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9132d931-f5d0-48c5-b039-89f69262b4a9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.937276] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Created folder: Instances in parent group-v941869. [ 685.937531] env[62914]: DEBUG oslo.service.loopingcall [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.937994] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 685.938230] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5583d6c7-57e3-4c94-a12a-e445a78c85b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.966210] env[62914]: INFO nova.compute.manager [-] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Took 1.52 seconds to deallocate network for instance. [ 685.977802] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.977802] env[62914]: value = "task-4831554" [ 685.977802] env[62914]: _type = "Task" [ 685.977802] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.981025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-90e7c0f8-3469-4376-8478-70ffce127eff tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "ef521e82-38ab-4d62-b434-da7f7fa8c50f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.444s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.988660] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831554, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.107820] env[62914]: INFO nova.compute.manager [-] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Took 1.47 seconds to deallocate network for instance. [ 686.299269] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 686.299822] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 686.300604] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 686.300604] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 686.300957] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 686.304032] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 686.304032] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 686.304032] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 686.304032] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 686.304032] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 686.304032] env[62914]: DEBUG nova.virt.hardware [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 686.304032] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036d1379-978c-4dbd-8bba-a84764d58b7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.314610] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228dea64-00f4-4e34-93c8-9d7177fa9a41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.321086] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.321436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.321677] env[62914]: DEBUG nova.network.neutron [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 686.335967] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 686.343162] env[62914]: DEBUG oslo.service.loopingcall [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.343265] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 686.343533] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3571d4f-469a-4abf-b076-5a4b0e0d42d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.371861] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 686.371861] env[62914]: value = "task-4831555" [ 686.371861] env[62914]: _type = "Task" [ 686.371861] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.371861] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831551, 'name': CreateSnapshot_Task, 'duration_secs': 0.51288} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.372127] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 686.377051] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f5755c-2199-4e4d-979c-d373f534174a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.388265] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831555, 'name': CreateVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.478609] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.494173] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831554, 'name': CreateVM_Task, 'duration_secs': 0.395942} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.494173] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 686.494784] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.494947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.495295] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 686.495568] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d54b92d7-0dbc-4df6-b0a0-fe90dc3d9158 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.503710] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 686.503710] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524f798d-f5d1-0567-da6e-46eda4ecf2c8" [ 686.503710] env[62914]: _type = "Task" [ 686.503710] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.515017] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524f798d-f5d1-0567-da6e-46eda4ecf2c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.573786] env[62914]: DEBUG nova.network.neutron [-] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.615796] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.684115] env[62914]: DEBUG nova.compute.manager [req-a782947e-20c9-41e8-9826-63528cbfa57f req-bc1132a2-fc69-4755-aeb4-3b4d53a3711a service nova] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Received event network-vif-deleted-5c562670-f8c1-48c4-9630-586f87930b56 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 686.884020] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831555, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.905933] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 686.907073] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-94a81351-536a-4a19-9dbe-b84b1fc0294f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.919040] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 686.919040] env[62914]: value = "task-4831556" [ 686.919040] env[62914]: _type = "Task" [ 686.919040] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.933886] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831556, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.020806] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524f798d-f5d1-0567-da6e-46eda4ecf2c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010111} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.021345] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.021652] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.022019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.022276] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.022576] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.022994] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3a82990-90f8-4958-bfb2-5e84a265563a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.038742] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.038901] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Folder [datastore2] devstack-image-cache_base created. 
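The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the image cache come from oslo.concurrency's lockutils, which serializes concurrent access to the shared [datastore2] devstack-image-cache_base entries. A minimal sketch of the same pattern, not Nova's actual code (the helper name, lock name formatting, and fetch_fn stand-in are illustrative):

from oslo_concurrency import lockutils


def fetch_image_if_missing(image_id, fetch_fn):
    """Serialize concurrent fetches of one cached image (illustrative)."""
    lock_name = '[datastore2] devstack-image-cache_base/%s' % image_id
    # lockutils.lock() emits the "Acquiring lock ..." / "Releasing lock ..."
    # debug lines seen in this trace; the "acquired ... waited N.NNNs" /
    # "released ... held N.NNNs" timings come from the related
    # lockutils.synchronized() decorator.
    with lockutils.lock(lock_name):
        fetch_fn(image_id)
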
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 687.040647] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43c1f874-4158-4ba3-92b0-10a37e21c0a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.050924] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 687.050924] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5254ffd6-447d-cb4a-1285-4e45376b9b35" [ 687.050924] env[62914]: _type = "Task" [ 687.050924] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.060446] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5254ffd6-447d-cb4a-1285-4e45376b9b35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.077433] env[62914]: INFO nova.compute.manager [-] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Took 1.52 seconds to deallocate network for instance. [ 687.080977] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94b6156-038d-41d3-9b50-652473f134dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.091925] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35923f1-1c18-4d41-9cd1-cae8e1eecf1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.123880] env[62914]: DEBUG nova.network.neutron [-] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.127619] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5ebc5b-7e0c-439d-8571-3242c0cae7a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.137693] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f8e83d-7f86-47aa-91e8-7980047fc454 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.156950] env[62914]: DEBUG nova.compute.provider_tree [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.224220] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Received event network-changed-b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 687.224610] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Refreshing instance network info cache due to event network-changed-b84edfae-0f54-42b6-99a8-f767333d7360. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 687.224944] env[62914]: DEBUG oslo_concurrency.lockutils [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Acquiring lock "refresh_cache-567f3d61-ed30-47d9-aebc-77c9392be506" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.225183] env[62914]: DEBUG oslo_concurrency.lockutils [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Acquired lock "refresh_cache-567f3d61-ed30-47d9-aebc-77c9392be506" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.225404] env[62914]: DEBUG nova.network.neutron [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Refreshing network info cache for port b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 687.386046] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831555, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.408410] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.411570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.411570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.411570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
687.411570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.411981] env[62914]: INFO nova.compute.manager [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Terminating instance [ 687.415656] env[62914]: DEBUG nova.compute.manager [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 687.415858] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 687.416754] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f2a1f5-e802-4f64-9bc3-bc2ae4b07d71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.429824] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831556, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.434965] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 687.434965] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2820805f-af74-421d-a161-a263429ce6cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.439999] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 687.439999] env[62914]: value = "task-4831557" [ 687.439999] env[62914]: _type = "Task" [ 687.439999] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.452173] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831557, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.491922] env[62914]: INFO nova.network.neutron [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Port ec98f693-b488-485c-8165-c736ecc6b3d7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 687.491922] env[62914]: DEBUG nova.network.neutron [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.563340] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5254ffd6-447d-cb4a-1285-4e45376b9b35, 'name': SearchDatastore_Task, 'duration_secs': 0.011789} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.564275] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51bb667b-7855-4b89-962e-989ec8d1f5bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.570576] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 687.570576] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528abc0a-5321-2a85-7046-00fd1046c98f" [ 687.570576] env[62914]: _type = "Task" [ 687.570576] env[62914]: } to complete. 
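For reference, each element of the network_info lists dumped in this trace is one VIF in Nova's network model. Condensed to the fields that matter here, the entry above for port b92603ac-8bea-4f9a-aa50-8c942106916d looks like the following (values copied from the trace, non-essential keys omitted, comments added):

vif = {
    'id': 'b92603ac-8bea-4f9a-aa50-8c942106916d',   # Neutron port UUID
    'address': 'fa:16:3e:2b:69:6f',                 # MAC assigned to the port
    'type': 'ovs',                                   # VIF type as bound by Neutron
    'devname': 'tapb92603ac-8b',                     # "tap" + first 11 chars of the port id
    'details': {
        'nsx-logical-switch-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7',
        'segmentation_id': 126,                      # backing NSX transport-zone segment
    },
    'network': {
        'id': '9be47f79-b984-4fc2-a590-a80f36132ab1',
        'bridge': 'br-int',                          # integration bridge label
        'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{'address': '192.168.128.3',     # fixed IP
                     'floating_ips': [{'address': '10.180.180.223'}]}],
        }],
        'meta': {'mtu': 8950},
    },
}
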
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.580481] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528abc0a-5321-2a85-7046-00fd1046c98f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.588721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.631881] env[62914]: INFO nova.compute.manager [-] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Took 1.84 seconds to deallocate network for instance. [ 687.665028] env[62914]: DEBUG nova.scheduler.client.report [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.886107] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831555, 'name': CreateVM_Task, 'duration_secs': 1.412451} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.888707] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 687.889648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.889897] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.890408] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 687.891190] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d624fa19-ea54-4ccc-8768-aa138c2a0576 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.898477] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 687.898477] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f522c0-a12b-2676-6c64-f974dfa75948" [ 687.898477] env[62914]: _type = "Task" [ 687.898477] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.912487] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f522c0-a12b-2676-6c64-f974dfa75948, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.931026] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831556, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.954833] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831557, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.993928] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.086473] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528abc0a-5321-2a85-7046-00fd1046c98f, 'name': SearchDatastore_Task, 'duration_secs': 0.0105} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.086473] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.086675] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 567f3d61-ed30-47d9-aebc-77c9392be506/567f3d61-ed30-47d9-aebc-77c9392be506.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 688.087014] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc219347-72d2-44db-8b4f-9edf4091206e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.094578] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 688.094578] env[62914]: value = "task-4831558" [ 688.094578] env[62914]: _type = "Task" [ 688.094578] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.106857] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831558, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.140694] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.143103] env[62914]: DEBUG nova.network.neutron [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Updated VIF entry in instance network info cache for port b84edfae-0f54-42b6-99a8-f767333d7360. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 688.144616] env[62914]: DEBUG nova.network.neutron [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Updating instance_info_cache with network_info: [{"id": "b84edfae-0f54-42b6-99a8-f767333d7360", "address": "fa:16:3e:27:79:be", "network": {"id": "74cef65b-589b-4ed3-aae9-6518635699bf", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-420307571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "500bdabe48614a18bf0868e232ee91cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb84edfae-0f", "ovs_interfaceid": "b84edfae-0f54-42b6-99a8-f767333d7360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.168550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.754s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.170036] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 688.173507] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.377s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.174222] env[62914]: DEBUG nova.objects.instance [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lazy-loading 'resources' on Instance uuid 69a9cd15-7d6f-464d-b451-e193179088f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 688.414828] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f522c0-a12b-2676-6c64-f974dfa75948, 'name': SearchDatastore_Task, 'duration_secs': 0.016092} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.415924] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.416527] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.417076] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.417453] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.417762] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.418266] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0091efd6-0ac6-4443-bd86-0461aa62332a {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.439755] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831556, 'name': CloneVM_Task, 'duration_secs': 1.492153} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.441282] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created linked-clone VM from snapshot [ 688.441673] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.442709] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 688.443380] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8c1de8-f20d-44dc-9c71-bc83b455e1fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.447400] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2c7b0eb-9cf4-427b-aa46-9ffdcb9a7e38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.458963] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 688.458963] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52583086-91ec-b512-d8ca-a8fecf0d52b7" [ 688.458963] env[62914]: _type = "Task" [ 688.458963] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.466706] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831557, 'name': PowerOffVM_Task, 'duration_secs': 0.596513} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.467393] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Uploading image c7276d8e-d0f8-4d5c-b179-a978d8bd33c6 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 688.474538] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 688.474770] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 688.475135] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f253c49-7077-442c-a3e8-b28c5667d785 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.485566] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52583086-91ec-b512-d8ca-a8fecf0d52b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.499149] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9b349380-e4d8-4c37-ba3f-5e45a94a058a tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-6fd5f3b8-1175-4bd5-b0b4-12517ba65271-ec98f693-b488-485c-8165-c736ecc6b3d7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.140s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.507193] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 688.507193] env[62914]: value = "vm-941874" [ 688.507193] env[62914]: _type = "VirtualMachine" [ 688.507193] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 688.508146] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9cde9bcb-3ac7-4b8e-99a4-f252d41db858 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.518833] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease: (returnval){ [ 688.518833] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e67d2-a775-5708-1d32-01e76b2f31b8" [ 688.518833] env[62914]: _type = "HttpNfcLease" [ 688.518833] env[62914]: } obtained for exporting VM: (result){ [ 688.518833] env[62914]: value = "vm-941874" [ 688.518833] env[62914]: _type = "VirtualMachine" [ 688.518833] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 688.519501] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the lease: (returnval){ [ 688.519501] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e67d2-a775-5708-1d32-01e76b2f31b8" [ 688.519501] env[62914]: _type = "HttpNfcLease" [ 688.519501] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 688.528900] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 688.528900] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e67d2-a775-5708-1d32-01e76b2f31b8" [ 688.528900] env[62914]: _type = "HttpNfcLease" [ 688.528900] env[62914]: } is initializing. 
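The image upload that starts here uses vSphere's export path: ExportVm returns an HttpNfcLease that is polled until it leaves the "initializing" state, after which the lease carries the URLs for a stream-optimized VMDK download (the actual transfer in this codepath goes through oslo_vmware.rw_handles). A minimal sketch of that handshake, with a hypothetical helper name and placeholder object references:

from oslo_vmware import vim_util


def open_export_lease(session, vm_ref):
    """Ask vCenter for an export lease on ``vm_ref`` and wait until it is ready."""
    # ExportVm returns an HttpNfcLease moref immediately; its state starts
    # out as 'initializing', matching the lease lines above.
    lease_ref = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # wait_for_lease_ready() polls the lease state and raises if it moves
    # to 'error' instead of 'ready'.
    session.wait_for_lease_ready(lease_ref)
    # Once ready, the lease's info property lists the device URLs from
    # which the stream-optimized disk contents can be read.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, lease_ref, 'info')
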
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 688.589308] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 688.589739] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 688.589828] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleting the datastore file [datastore2] 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.591572] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4059e544-5a8e-4a62-bf56-f2577fd57c11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.602035] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 688.602035] env[62914]: value = "task-4831561" [ 688.602035] env[62914]: _type = "Task" [ 688.602035] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.610662] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831558, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.619090] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831561, 'name': DeleteDatastoreFile_Task} progress is 0%. 
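The teardown of instance 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 follows the three-step sequence visible in this trace: power off, unregister, then delete the instance's datastore directory. Sketched with the same oslo.vmware call pattern (the helper name is hypothetical; the datacenter reference and datastore path are placeholders supplied by the caller, e.g. "[datastore2] <instance-uuid>"):

def destroy_vm(session, vm_ref, dc_ref, ds_path):
    """Power off, unregister, and remove a VM's files, as in the trace above."""
    # PowerOffVM_Task is asynchronous, so it is waited on like any other task.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # UnregisterVM removes the VM from the inventory without touching its
    # disks and returns no task object.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # Deleting the instance directory goes through the FileManager and is
    # again a task (DeleteDatastoreFile_Task in the log).
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                           file_manager, name=ds_path, datacenter=dc_ref))
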
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.645994] env[62914]: DEBUG oslo_concurrency.lockutils [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Releasing lock "refresh_cache-567f3d61-ed30-47d9-aebc-77c9392be506" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.646378] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Received event network-vif-deleted-ce7a44ed-a822-4d9c-ac68-4d421b3d5b23 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 688.646586] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Received event network-vif-deleted-510995e6-4d3c-4b63-ae4e-d4c1f74254e8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 688.646768] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-vif-deleted-ec98f693-b488-485c-8165-c736ecc6b3d7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 688.646961] env[62914]: INFO nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Neutron deleted interface ec98f693-b488-485c-8165-c736ecc6b3d7; detaching it from the instance and deleting it from the info cache [ 688.647488] env[62914]: DEBUG nova.network.neutron [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [{"id": "b92603ac-8bea-4f9a-aa50-8c942106916d", "address": "fa:16:3e:2b:69:6f", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92603ac-8b", "ovs_interfaceid": "b92603ac-8bea-4f9a-aa50-8c942106916d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.682431] env[62914]: DEBUG nova.compute.utils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c 
tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 688.687548] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 688.687548] env[62914]: DEBUG nova.network.neutron [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 688.783460] env[62914]: DEBUG nova.policy [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '717bc653b83e47568ac0ee983b656c04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4026bc0aca7941a79d5e71bb1a7df1f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 688.978519] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52583086-91ec-b512-d8ca-a8fecf0d52b7, 'name': SearchDatastore_Task, 'duration_secs': 0.025303} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.979511] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f27b261-75f1-451c-aa03-774a05af5c22 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.988452] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 688.988452] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ab1eeb-66ad-25da-cd43-325096461b92" [ 688.988452] env[62914]: _type = "Task" [ 688.988452] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.997963] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ab1eeb-66ad-25da-cd43-325096461b92, 'name': SearchDatastore_Task} progress is 0%. 
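The nova.policy DEBUG a few entries above records that the network:attach_external_network check failed for a plain member/reader token, which is expected for a non-admin request. The rough illustration below shows how such a rule is evaluated with oslo.policy; the admin-only check string and the credential dict are assumptions for the example, not nova's registered defaults.

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed admin-only default, for illustration only; nova registers its own
# default for this rule.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'], 'project_id': 'demo-project'}
allowed = enforcer.enforce('network:attach_external_network',
                           target={}, creds=creds, do_raise=False)
print(allowed)  # False for a member/reader token, as in the DEBUG line above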
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.028271] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 689.028271] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e67d2-a775-5708-1d32-01e76b2f31b8" [ 689.028271] env[62914]: _type = "HttpNfcLease" [ 689.028271] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 689.028590] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 689.028590] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e67d2-a775-5708-1d32-01e76b2f31b8" [ 689.028590] env[62914]: _type = "HttpNfcLease" [ 689.028590] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 689.029589] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeffc586-4110-4ae0-b9e6-17cd3b35bf56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.046841] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf50bf-7a4c-f57a-6d18-fb3002e80546/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 689.047166] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf50bf-7a4c-f57a-6d18-fb3002e80546/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 689.129585] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70962} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.130022] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831561, 'name': DeleteDatastoreFile_Task} progress is 0%. 
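The lease-ready, "Found VMDK URL" and "Opening URL ... for reading" lines above are produced by oslo.vmware's VmdkReadHandle, which wraps the whole export-lease dance behind a file-like object. A sketch of that helper follows; `session` and `vm_ref` are assumed from the earlier sketch, and the ESX host, port, datastore path and size are illustrative values.

from oslo_vmware import rw_handles

read_handle = rw_handles.VmdkReadHandle(
    session,
    'esx-host.example.org',          # ESX host that serves the NFC URL
    443,
    vm_ref,
    '[datastore1] example/example.vmdk',
    1024 * 1024 * 1024)              # bytes expected to be read (placeholder)

chunk = read_handle.read(64 * 1024)  # stream the exported disk in chunks
read_handle.close()                  # completes/releases the HttpNfcLease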
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.130226] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 567f3d61-ed30-47d9-aebc-77c9392be506/567f3d61-ed30-47d9-aebc-77c9392be506.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 689.130461] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 689.130744] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42f42d68-3fcc-4f54-95a7-d4317db7f809 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.140169] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 689.140169] env[62914]: value = "task-4831562" [ 689.140169] env[62914]: _type = "Task" [ 689.140169] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.151390] env[62914]: DEBUG oslo_concurrency.lockutils [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Acquiring lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.151684] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831562, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.158714] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c295ff7f-f076-48cc-bd61-3ca0527742fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.187647] env[62914]: DEBUG nova.compute.utils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.298676] env[62914]: DEBUG nova.network.neutron [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Successfully created port: ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.349196] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ede05f0-0afc-4e13-925b-2775cc010060 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.356226] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e6f1cd-a660-495e-a446-cae1d7c30b31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.390845] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2c7a7d-636f-487c-9766-df1bd230b5dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.401961] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a860985d-b72d-4be1-9ace-7e4b58dad543 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.420058] env[62914]: DEBUG nova.compute.provider_tree [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.501692] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ab1eeb-66ad-25da-cd43-325096461b92, 'name': SearchDatastore_Task, 'duration_secs': 0.022981} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.502519] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.502817] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 689.503175] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2acfbf94-8e2e-4aa5-a600-0070ea94fe7e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.511786] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 689.511786] env[62914]: value = "task-4831563" [ 689.511786] env[62914]: _type = "Task" [ 689.511786] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.523276] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831563, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.633535] env[62914]: DEBUG oslo_vmware.api [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.557967} completed successfully. 
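The "Acquiring lock" / "Releasing lock" lines around the devstack-image-cache_base VMDK come from oslo.concurrency: the image-cache path is used as a lock name so only one request copies or consumes a cached image at a time. A minimal sketch of that primitive, with a hypothetical helper standing in for the actual CopyVirtualDisk_Task call:

from oslo_concurrency import lockutils

cache_vmdk = ('[datastore1] devstack-image-cache_base/'
              '75c43660-b52b-450e-ba36-0f721e14bc6c/'
              '75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk')

def copy_cached_image(path):
    """Hypothetical stand-in for the disk copy done above."""
    print('copying', path)

# The lock name is the datastore path, mirroring the lock names in the log;
# with the default external=False this serializes work within one
# nova-compute process.
with lockutils.lock(cache_vmdk):
    copy_cached_image(cache_vmdk)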
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.633535] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.633535] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 689.633535] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 689.633535] env[62914]: INFO nova.compute.manager [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Took 2.22 seconds to destroy the instance on the hypervisor. [ 689.633535] env[62914]: DEBUG oslo.service.loopingcall [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.633535] env[62914]: DEBUG nova.compute.manager [-] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 689.633535] env[62914]: DEBUG nova.network.neutron [-] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 689.653489] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078547} completed successfully. 
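The oslo.service.loopingcall DEBUG above ("Waiting for function ... _deallocate_network_with_retries to return") shows the network deallocation wrapped in a back-off retry loop so a transient Neutron failure does not leak the allocation. A self-contained sketch of that primitive, under the assumption that the failing function and its exception type are stand-ins rather than nova's code:

from oslo_service import loopingcall

class TransientNetworkError(Exception):
    """Stand-in for a transient Neutron/Keystone failure."""

state = {'attempts': 0}

def deallocate_network():
    state['attempts'] += 1
    if state['attempts'] < 3:
        raise TransientNetworkError()    # fail the first two attempts

def _deallocate_with_retries():
    try:
        deallocate_network()
    except TransientNetworkError:
        return False                     # signal an error; the next retry backs off
    raise loopingcall.LoopingCallDone()  # success: stop the loop

timer = loopingcall.BackOffLoopingCall(_deallocate_with_retries)
timer.start().wait()                     # the "Waiting for function" phase
print('deallocated after', state['attempts'], 'attempts')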
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.653698] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.654845] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd930732-1afb-4161-8e28-d2a2ae1bbe8f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.690013] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 567f3d61-ed30-47d9-aebc-77c9392be506/567f3d61-ed30-47d9-aebc-77c9392be506.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.690013] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fea82407-0f2a-426f-b84b-d7b3b4f60681 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.706740] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 689.722619] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 689.722619] env[62914]: value = "task-4831564" [ 689.722619] env[62914]: _type = "Task" [ 689.722619] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.734078] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831564, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.926287] env[62914]: DEBUG nova.scheduler.client.report [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 690.029264] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831563, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.235983] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831564, 'name': ReconfigVM_Task, 'duration_secs': 0.37444} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.236640] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 567f3d61-ed30-47d9-aebc-77c9392be506/567f3d61-ed30-47d9-aebc-77c9392be506.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.237616] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-493b2521-015c-4fca-acb2-2a9e2cc94c05 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.245716] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 690.245716] env[62914]: value = "task-4831565" [ 690.245716] env[62914]: _type = "Task" [ 690.245716] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.260993] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831565, 'name': Rename_Task} progress is 6%. 
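The inventory reported above determines how much the scheduler can place on provider f2f7a014-852b-4b37-9610-c5761f4b0175: capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps any single allocation. A short worked example using the exact figures from the log:

# Figures copied from the inventory DEBUG line above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:.0f}, max per allocation={inv['max_unit']}")

# VCPU: 192 (48 cores oversubscribed 4x), MEMORY_MB: 196078, DISK_GB: 200,
# but no single allocation may request more than 95 GB of disk.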
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.435731] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.440474] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "e6544702-bde7-4056-8a50-adede5c6a9d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.440701] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "e6544702-bde7-4056-8a50-adede5c6a9d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.443665] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.344s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.443665] env[62914]: DEBUG nova.objects.instance [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 690.523317] env[62914]: INFO nova.scheduler.client.report [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Deleted allocations for instance 69a9cd15-7d6f-464d-b451-e193179088f7 [ 690.535581] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831563, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.724879] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 690.768590] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831565, 'name': Rename_Task, 'duration_secs': 0.173368} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.769197] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 690.769445] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42653084-45c3-46ca-971e-9c12b9459077 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.776441] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:22:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='475991841',id=30,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-553626161',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.776614] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.776787] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.777052] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.777384] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 690.777601] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.777958] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.778254] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.778529] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.778770] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.779034] env[62914]: DEBUG nova.virt.hardware [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.780070] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17970be-d68b-4251-b0be-cbfbeda154dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.791474] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b2eb44-2871-4b71-af6b-4ad0d5cc2fb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.795975] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 690.795975] env[62914]: value = "task-4831566" [ 690.795975] env[62914]: _type = "Task" [ 690.795975] env[62914]: } to complete. 
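The hardware.py entries above go from flavor/image limits of 0:0:0 (unset, so the 65536 maxima apply) to a single possible topology of 1 socket, 1 core, 1 thread for this 1-vCPU flavor. The simplified enumeration below shows why only 1:1:1 survives; it mirrors the idea, not nova's actual implementation.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) whose product equals the vCPU count."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log above
print(list(possible_topologies(4)))   # several factorisations for 4 vCPUs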
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.817200] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831566, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.004250] env[62914]: DEBUG nova.network.neutron [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Successfully updated port: ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.030528] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831563, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.040049] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4fdbb051-ba3d-4461-9ffe-51195e51314d tempest-DeleteServersAdminTestJSON-1896056451 tempest-DeleteServersAdminTestJSON-1896056451-project-member] Lock "69a9cd15-7d6f-464d-b451-e193179088f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.506s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.309776] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831566, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.384015] env[62914]: DEBUG nova.network.neutron [-] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.459270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37031b32-a1ae-4153-b29b-4baf1aaeb145 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.460546] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 23.063s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.513321] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.513594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.513799] env[62914]: DEBUG nova.network.neutron [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.526831] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831563, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.607561} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.527959] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 691.528833] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.529523] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e984a4f3-6a4a-47e7-a8c4-b07775f04b0d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.540517] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 691.540517] env[62914]: value = "task-4831567" [ 691.540517] env[62914]: _type = "Task" [ 691.540517] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.559025] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831567, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.717591] env[62914]: DEBUG nova.compute.manager [req-19a6d490-f9ad-487a-89e5-0ea0c19d72ce req-ce80ad14-11f0-40b2-b517-b711de617268 service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Received event network-vif-plugged-ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 691.717831] env[62914]: DEBUG oslo_concurrency.lockutils [req-19a6d490-f9ad-487a-89e5-0ea0c19d72ce req-ce80ad14-11f0-40b2-b517-b711de617268 service nova] Acquiring lock "54185b06-7ccb-4740-a6ee-213bbfa6365b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.718283] env[62914]: DEBUG oslo_concurrency.lockutils [req-19a6d490-f9ad-487a-89e5-0ea0c19d72ce req-ce80ad14-11f0-40b2-b517-b711de617268 service nova] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.721100] env[62914]: DEBUG oslo_concurrency.lockutils [req-19a6d490-f9ad-487a-89e5-0ea0c19d72ce req-ce80ad14-11f0-40b2-b517-b711de617268 service nova] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.721100] env[62914]: DEBUG nova.compute.manager [req-19a6d490-f9ad-487a-89e5-0ea0c19d72ce req-ce80ad14-11f0-40b2-b517-b711de617268 service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] No waiting events found dispatching network-vif-plugged-ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 691.721100] env[62914]: WARNING nova.compute.manager [req-19a6d490-f9ad-487a-89e5-0ea0c19d72ce req-ce80ad14-11f0-40b2-b517-b711de617268 service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Received unexpected event network-vif-plugged-ba331d08-bcc1-4f3e-b972-ee660dd6c0db for instance with vm_state building and task_state spawning. 
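The "No waiting events found dispatching network-vif-plugged-..." DEBUG and the WARNING about an unexpected event mean the Neutron notification arrived before the spawn path registered a waiter for it, which is harmless while the instance is still building. A generic illustration of that race using plain threading primitives (nova's own mechanism lives in nova.compute.manager.InstanceEvents and is more involved):

import threading

_waiters = {}                  # event name -> Event registered by the spawner
_lock = threading.Lock()

def prepare_for_event(name):
    """Called by the spawn path before it starts waiting."""
    ev = threading.Event()
    with _lock:
        _waiters[name] = ev
    return ev

def dispatch_event(name):
    """Called when the external (Neutron) notification arrives."""
    with _lock:
        ev = _waiters.pop(name, None)
    if ev is None:
        print(f'No waiting events found dispatching {name}')  # the WARNING case
    else:
        ev.set()

# The notification beats the waiter registration, as in the log above.
dispatch_event('network-vif-plugged-ba331d08')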
[ 691.740580] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.741527] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.742643] env[62914]: DEBUG nova.compute.manager [req-3e38a8dc-abc8-44cf-bdc2-25dc0cc15d62 req-1cc3ad04-576f-4e50-890d-f8b346aa6652 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Received event network-vif-deleted-b92603ac-8bea-4f9a-aa50-8c942106916d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 691.811329] env[62914]: DEBUG oslo_vmware.api [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831566, 'name': PowerOnVM_Task, 'duration_secs': 0.57063} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.811329] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 691.811329] env[62914]: INFO nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Took 8.73 seconds to spawn the instance on the hypervisor. [ 691.811329] env[62914]: DEBUG nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 691.812601] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74d8d5a-86ac-46e8-b88c-a50b51eaad4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.886191] env[62914]: INFO nova.compute.manager [-] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Took 2.25 seconds to deallocate network for instance. [ 692.054680] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084691} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.055012] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.056563] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182dcf03-69e7-4573-9887-0e9e77cf1d7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.078966] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.079954] env[62914]: DEBUG nova.network.neutron [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.082556] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b605d8-681e-4e02-8f8f-3caa3e0e4614 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.106921] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 692.106921] env[62914]: value = "task-4831568" [ 692.106921] env[62914]: _type = "Task" [ 692.106921] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.117451] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831568, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.267523] env[62914]: DEBUG nova.network.neutron [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Updating instance_info_cache with network_info: [{"id": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "address": "fa:16:3e:21:57:7d", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba331d08-bc", "ovs_interfaceid": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.336778] env[62914]: INFO nova.compute.manager [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Took 36.60 seconds to build instance. [ 692.393485] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.513751] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 3eff61b1-b09c-4a04-821c-cefdc7be3f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.514161] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance a4fca617-da38-4913-b2c8-a2921da6db56 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.514463] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 692.515097] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e69c36e9-3c59-48e3-9962-ffe8de10a789 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.515097] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.515097] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 9ce44ae9-9369-4c0c-9d14-9c8fde42d612 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.515097] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance ff2cff97-1671-4f97-8f69-532253169ff8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.515348] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 2f7bc586-af68-4d9d-81e2-8247371dfa7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.515507] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance aede8da7-8bf2-4963-b08b-6e06007614a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.515755] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 70a6d3e7-6928-47a7-9f7f-bd5dad64912f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.515919] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 1ddb6508-d8fb-4ead-bcb0-370c19bb287d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.516122] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 6bdcd778-0942-41e7-a6fb-7c3413d34ef7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.516344] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 82aab17d-a6d0-48cf-a59a-fbef7d402894 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.516536] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance d8d08c36-bec2-4117-9352-8e148d25dc9e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.517045] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 1fa01184-1ed2-43de-bcbf-bd8658acc9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.517045] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bfdd7711-d081-42cf-9e4a-2df556d1b72e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.517305] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 934a0ca3-d879-4b23-90fe-2c190c201a88 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.517541] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4fbb08f0-6712-4e78-b9da-b33a812ec9b7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 692.517741] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 5bba4aa5-2b92-42b4-8516-72298a99f0e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.517906] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 567f3d61-ed30-47d9-aebc-77c9392be506 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.518095] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 54185b06-7ccb-4740-a6ee-213bbfa6365b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 692.619227] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.770721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Releasing lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.771187] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Instance network_info: |[{"id": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "address": "fa:16:3e:21:57:7d", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba331d08-bc", "ovs_interfaceid": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 692.773813] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:57:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2321dbbe-f64a-4253-a462-21676f8a278e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba331d08-bcc1-4f3e-b972-ee660dd6c0db', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.780919] env[62914]: DEBUG 
oslo.service.loopingcall [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.781609] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 692.781888] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb517bb2-bdc0-4ae6-b3d0-70e2811e8622 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.815868] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.815868] env[62914]: value = "task-4831569" [ 692.815868] env[62914]: _type = "Task" [ 692.815868] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.826198] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831569, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.839670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-590af88f-bde7-4f20-a821-3d354f1c0454 tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "567f3d61-ed30-47d9-aebc-77c9392be506" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.159s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.023361] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance fed831e0-4518-4025-89b1-7f6b644e013d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 693.122123] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831568, 'name': ReconfigVM_Task, 'duration_secs': 0.680829} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.122123] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6/5bba4aa5-2b92-42b4-8516-72298a99f0e6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.122123] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d07121d3-f789-46b6-a377-9ae6b3f6eb32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.128455] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 693.128455] env[62914]: value = "task-4831570" [ 693.128455] env[62914]: _type = "Task" [ 693.128455] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.142135] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831570, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.328837] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831569, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.343249] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 693.531617] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 12aa02f0-a232-427a-80ba-1faa12c4d43a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 693.646184] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831570, 'name': Rename_Task, 'duration_secs': 0.31053} completed successfully. 
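The Task entries above (ReconfigVM_Task, Rename_Task, CreateVM_Task) all follow the same oslo.vmware pattern: the *_Task call returns a task handle such as task-4831570 immediately, and the handle is then polled ("progress is N%") until it completes. Below is a minimal sketch of that pattern only, assuming an already-established oslo_vmware.api.VMwareAPISession; the function name power_on and its arguments are illustrative, not Nova's code.

def power_on(session, vm_ref):
    """Illustrative sketch: start a VM and wait for the vCenter task.

    `session` is assumed to be an established
    oslo_vmware.api.VMwareAPISession; `vm_ref` is a managed-object
    reference to the virtual machine.
    """
    # *_Task SOAP calls return a task reference right away ...
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ... and wait_for_task() polls it (the "progress is N%" entries)
    # until it reports success, raising if the task ends in error.
    session.wait_for_task(task_ref)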
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.646467] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 693.646790] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f038c123-e4ab-47d8-9302-6d333ed477b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.658699] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 693.658699] env[62914]: value = "task-4831571" [ 693.658699] env[62914]: _type = "Task" [ 693.658699] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.669464] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.730257] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "567f3d61-ed30-47d9-aebc-77c9392be506" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.730636] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "567f3d61-ed30-47d9-aebc-77c9392be506" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.730960] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "567f3d61-ed30-47d9-aebc-77c9392be506-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.731189] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "567f3d61-ed30-47d9-aebc-77c9392be506-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.731425] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c 
tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "567f3d61-ed30-47d9-aebc-77c9392be506-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.737289] env[62914]: INFO nova.compute.manager [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Terminating instance [ 693.741604] env[62914]: DEBUG nova.compute.manager [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 693.742565] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 693.745040] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21553234-2e22-4ac3-955e-0641dd6e43a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.754574] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 693.755052] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e185c4da-f36e-4644-9ac7-01b3458e5aa7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.763343] env[62914]: DEBUG oslo_vmware.api [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 693.763343] env[62914]: value = "task-4831572" [ 693.763343] env[62914]: _type = "Task" [ 693.763343] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.775523] env[62914]: DEBUG oslo_vmware.api [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.829384] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831569, 'name': CreateVM_Task, 'duration_secs': 0.669558} completed successfully. 
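The lock lines above come from oslo.concurrency's lockutils: the 'acquired by "..." :: waited N.NNNs' / '"released" ... :: held N.NNNs' pairs are emitted by the synchronized-decorator path (inner() in lockutils.py), while the plain Acquiring/Acquired/Releasing entries for the refresh_cache-* names come from the lock() context manager. A minimal sketch of both forms follows; the lock names and functions are invented for illustration, only the lockutils calls are the real API.

from oslo_concurrency import lockutils


# Decorator form: callers serialize on the named semaphore, producing the
# 'acquired by "..." :: waited N.NNNs' / '"released" ... :: held N.NNNs'
# debug entries seen above.
@lockutils.synchronized('example-instance-events')
def clear_events(instance_uuid):
    print('clearing events for %s' % instance_uuid)


# Context-manager form: produces the plain Acquiring/Acquired/Releasing
# lock entries, e.g. for the "refresh_cache-<uuid>" locks above.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('refreshing network info cache for %s' % instance_uuid)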
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.829686] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 693.830814] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.831053] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.831682] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 693.831991] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d558ea69-8df2-47b9-a0f9-0fe7417a9be2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.838458] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 693.838458] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5299cc4a-f885-3eca-da7a-46b1d9bd8c15" [ 693.838458] env[62914]: _type = "Task" [ 693.838458] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.853928] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5299cc4a-f885-3eca-da7a-46b1d9bd8c15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.868975] env[62914]: DEBUG nova.compute.manager [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Received event network-changed-ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 693.869140] env[62914]: DEBUG nova.compute.manager [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Refreshing instance network info cache due to event network-changed-ba331d08-bcc1-4f3e-b972-ee660dd6c0db. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 693.869614] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] Acquiring lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.869708] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] Acquired lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.870674] env[62914]: DEBUG nova.network.neutron [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Refreshing network info cache for port ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 693.879727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.037291] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance cead3557-080d-4956-a957-cac449bb69f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 694.172538] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831571, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.274994] env[62914]: DEBUG oslo_vmware.api [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831572, 'name': PowerOffVM_Task, 'duration_secs': 0.250049} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.284160] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 694.284160] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 694.284160] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd51720f-6f4a-455c-9e6e-0b5527b89af7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.355347] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5299cc4a-f885-3eca-da7a-46b1d9bd8c15, 'name': SearchDatastore_Task, 'duration_secs': 0.018137} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.357866] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.358311] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.358726] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.359194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.359815] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 
tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.359815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 694.360229] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 694.360454] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Deleting the datastore file [datastore2] 567f3d61-ed30-47d9-aebc-77c9392be506 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 694.361062] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52d8b641-1278-4b67-a521-720eab88ef45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.363253] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1eb3f3c9-9664-447a-afff-4abd8bfa321d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.372396] env[62914]: DEBUG oslo_vmware.api [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for the task: (returnval){ [ 694.372396] env[62914]: value = "task-4831574" [ 694.372396] env[62914]: _type = "Task" [ 694.372396] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.379766] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.380139] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 694.381513] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3e928ac-980d-4414-9565-7b94c1a5b8cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.390328] env[62914]: DEBUG oslo_vmware.api [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.394280] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 694.394280] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52083491-b8b3-6cbd-f168-0ad6b920b3d1" [ 694.394280] env[62914]: _type = "Task" [ 694.394280] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.406073] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52083491-b8b3-6cbd-f168-0ad6b920b3d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.538632] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bf2e9634-66ee-4b6a-a148-bc77420d793f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 694.674822] env[62914]: DEBUG oslo_vmware.api [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831571, 'name': PowerOnVM_Task, 'duration_secs': 0.598691} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.675158] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 694.675434] env[62914]: DEBUG nova.compute.manager [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 694.677203] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1c9fb8-fb0c-42f9-aa10-fc8f0d227fc9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.753899] env[62914]: DEBUG nova.network.neutron [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Updated VIF entry in instance network info cache for port ba331d08-bcc1-4f3e-b972-ee660dd6c0db. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 694.753899] env[62914]: DEBUG nova.network.neutron [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Updating instance_info_cache with network_info: [{"id": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "address": "fa:16:3e:21:57:7d", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba331d08-bc", "ovs_interfaceid": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.886239] env[62914]: DEBUG oslo_vmware.api [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Task: {'id': task-4831574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23844} completed successfully. 
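The instance_info_cache updates above store the whole Neutron view of the port as one JSON-style blob. As a small, self-contained illustration of its shape (not Nova code; the blob below is an abbreviated copy of the data from the log, keeping only the fields read here), the fixed IPs, MAC and device name can be pulled out like this:

import json

# Abbreviated copy of the network_info blob from the cache-update entries
# above (values taken from the log; unrelated fields omitted).
network_info = json.loads('''
[{"id": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db",
  "address": "fa:16:3e:21:57:7d",
  "devname": "tapba331d08-bc",
  "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.14",
                                    "type": "fixed"}]}]}}]
''')

for vif in network_info:
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips']
                 if ip['type'] == 'fixed']
    print(vif['id'], vif['address'], vif['devname'], fixed_ips)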
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.886565] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.886828] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 694.887084] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 694.887376] env[62914]: INFO nova.compute.manager [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Took 1.14 seconds to destroy the instance on the hypervisor. [ 694.887706] env[62914]: DEBUG oslo.service.loopingcall [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 694.887993] env[62914]: DEBUG nova.compute.manager [-] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 694.888234] env[62914]: DEBUG nova.network.neutron [-] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 694.907279] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52083491-b8b3-6cbd-f168-0ad6b920b3d1, 'name': SearchDatastore_Task, 'duration_secs': 0.017061} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.908142] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1c52a46-4d32-44a1-8778-05314b134261 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.915473] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 694.915473] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fa196e-9f43-1f08-3597-0ea6da45b684" [ 694.915473] env[62914]: _type = "Task" [ 694.915473] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.927341] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fa196e-9f43-1f08-3597-0ea6da45b684, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.044030] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4496a977-30b2-4323-a561-884633958cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 695.204582] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.260153] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d0e211a-f286-4a5e-ba41-cfbe95feac1f req-d7665252-eee1-4da3-be81-246140b2abca service nova] Releasing lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.433373] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fa196e-9f43-1f08-3597-0ea6da45b684, 'name': SearchDatastore_Task, 'duration_secs': 0.038618} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.433727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.434063] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b/54185b06-7ccb-4740-a6ee-213bbfa6365b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 695.434359] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-126d8db3-ac5b-4005-b221-185ee7df7737 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.444763] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 695.444763] env[62914]: value = "task-4831575" [ 695.444763] env[62914]: _type = "Task" [ 695.444763] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.457889] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831575, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.547424] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 5a704020-921e-4ede-9fd9-b745c027a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 695.799875] env[62914]: DEBUG nova.network.neutron [-] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.959993] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831575, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.051914] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 1342d15d-fbef-4709-adf6-f827bc13d3ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 696.190312] env[62914]: DEBUG nova.compute.manager [req-9d69ecea-24b2-459a-bbf0-d0d61635cc9b req-a99afef4-238c-40d9-8680-e461c78f9348 service nova] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Received event network-vif-deleted-b84edfae-0f54-42b6-99a8-f767333d7360 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 696.252172] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.252461] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.252720] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.252868] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.253065] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.258200] env[62914]: INFO nova.compute.manager [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Terminating instance [ 696.260558] env[62914]: DEBUG oslo_concurrency.lockutils 
[None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "refresh_cache-5bba4aa5-2b92-42b4-8516-72298a99f0e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.260726] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquired lock "refresh_cache-5bba4aa5-2b92-42b4-8516-72298a99f0e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.260902] env[62914]: DEBUG nova.network.neutron [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 696.302713] env[62914]: INFO nova.compute.manager [-] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Took 1.41 seconds to deallocate network for instance. [ 696.458082] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831575, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719204} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.458379] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b/54185b06-7ccb-4740-a6ee-213bbfa6365b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 696.458602] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.458861] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b931814-40bf-464a-936a-57cc815da635 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.467859] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 696.467859] env[62914]: value = "task-4831576" [ 696.467859] env[62914]: _type = "Task" [ 696.467859] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.479816] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.557546] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 8b83f82b-42f7-4f33-abc4-ff278d343309 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 696.795826] env[62914]: DEBUG nova.network.neutron [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 696.814176] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.907666] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf50bf-7a4c-f57a-6d18-fb3002e80546/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 696.910040] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27413e93-17ab-4592-8fbc-af4f345a3bd4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.914343] env[62914]: DEBUG nova.network.neutron [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.921033] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf50bf-7a4c-f57a-6d18-fb3002e80546/disk-0.vmdk is in state: ready. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 696.921599] env[62914]: ERROR oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf50bf-7a4c-f57a-6d18-fb3002e80546/disk-0.vmdk due to incomplete transfer. [ 696.921757] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-78d81503-d399-4df3-9136-3ffb7e993ff0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.931027] env[62914]: DEBUG oslo_vmware.rw_handles [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf50bf-7a4c-f57a-6d18-fb3002e80546/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 696.932119] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Uploaded image c7276d8e-d0f8-4d5c-b179-a978d8bd33c6 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 696.934674] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 696.935558] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8b187da2-8b1e-4bb1-9aae-4ee086e518f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.945948] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 696.945948] env[62914]: value = "task-4831577" [ 696.945948] env[62914]: _type = "Task" [ 696.945948] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.952781] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831577, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.979349] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115524} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.979996] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.981018] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841d06dc-d46d-45ed-977c-74001278a24d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.013117] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b/54185b06-7ccb-4740-a6ee-213bbfa6365b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.013117] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01146de2-ea3d-45f6-a813-79753c5bd09f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.033853] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 697.033853] env[62914]: value = "task-4831578" [ 697.033853] env[62914]: _type = "Task" [ 697.033853] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.042683] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831578, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.060767] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4cea2bd1-a238-4fb6-bc47-719894461228 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 697.418079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Releasing lock "refresh_cache-5bba4aa5-2b92-42b4-8516-72298a99f0e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.418625] env[62914]: DEBUG nova.compute.manager [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 697.418866] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 697.419913] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9addf30d-bcb4-429a-82a0-ac2b5f82db32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.429943] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 697.429943] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d85cc3b-d425-46fc-91e1-80b33bf5c78b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.437743] env[62914]: DEBUG oslo_vmware.api [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 697.437743] env[62914]: value = "task-4831579" [ 697.437743] env[62914]: _type = "Task" [ 697.437743] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.450309] env[62914]: DEBUG oslo_vmware.api [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831579, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.456434] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831577, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.545294] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831578, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.565634] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e6544702-bde7-4056-8a50-adede5c6a9d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 697.950760] env[62914]: DEBUG oslo_vmware.api [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831579, 'name': PowerOffVM_Task, 'duration_secs': 0.164913} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.951088] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 697.951269] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 697.951922] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a9b62fa-2a95-4aec-b53f-44e7998ae2f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.962170] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831577, 'name': Destroy_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.981063] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 697.981470] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 697.981583] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Deleting the datastore file [datastore1] 5bba4aa5-2b92-42b4-8516-72298a99f0e6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 697.981873] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81ae1a39-bb1a-41dd-90ff-045b34a56e22 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.988626] env[62914]: DEBUG oslo_vmware.api [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for the task: (returnval){ [ 697.988626] env[62914]: value = "task-4831581" [ 697.988626] env[62914]: _type = "Task" [ 697.988626] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.998091] env[62914]: DEBUG oslo_vmware.api [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.045053] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831578, 'name': ReconfigVM_Task, 'duration_secs': 0.732206} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.045385] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b/54185b06-7ccb-4740-a6ee-213bbfa6365b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.045731] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62914) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1373}} [ 698.046439] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-61d0e857-399c-4281-9e7a-7f7cac0fffea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.054556] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 698.054556] env[62914]: value = "task-4831582" [ 698.054556] env[62914]: _type = "Task" [ 698.054556] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.064709] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831582, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.071672] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bc6da94e-4de8-4e56-a071-d04c5e5dad18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 698.071974] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 698.072159] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=100GB used_disk=12GB total_vcpus=48 used_vcpus=11 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '11', 'num_vm_active': '8', 'num_task_None': '6', 'num_os_type_None': '11', 'num_proj_2e460060822e4eda931ae402635e9eb6': '1', 'io_workload': '2', 'num_proj_8e2b3db08ee34716be135d72b3ddda8d': '1', 'num_task_deleting': '1', 'num_proj_2562164f04b045a59b3b501d2b0014ec': '1', 'num_task_image_uploading': '1', 'num_proj_894c73ea90624428afeb1165afbbfa9c': '1', 'num_vm_rescued': '1', 'num_proj_df7ae349aea0487d88689eb09933eb1c': '1', 'num_proj_2c4d14e64cb240d9816b0677dc020110': '1', 'num_proj_7dd71bf518024821931bb9add9996d4e': '2', 'num_task_rebuild_spawning': '1', 'num_proj_9cb64b10c45d4024a178ad65d8ba56e0': '1', 'num_vm_building': '2', 'num_task_spawning': '2', 'num_proj_500bdabe48614a18bf0868e232ee91cc': '1', 'num_proj_4026bc0aca7941a79d5e71bb1a7df1f8': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 698.455303] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831577, 'name': Destroy_Task, 'duration_secs': 1.150388} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.455598] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroyed the VM [ 698.455845] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 698.456118] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-63af2f1a-91df-4ce0-8cff-503e9cb8813a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.466419] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 698.466419] env[62914]: value = "task-4831583" [ 698.466419] env[62914]: _type = "Task" [ 698.466419] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.477525] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831583, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.499819] env[62914]: DEBUG oslo_vmware.api [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Task: {'id': task-4831581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14103} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.503334] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 698.503484] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 698.503665] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 698.503844] env[62914]: INFO nova.compute.manager [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Took 1.09 seconds to destroy the instance on the hypervisor. [ 698.504371] env[62914]: DEBUG oslo.service.loopingcall [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 698.504523] env[62914]: DEBUG nova.compute.manager [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 698.504616] env[62914]: DEBUG nova.network.neutron [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 698.522660] env[62914]: DEBUG nova.network.neutron [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.566781] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831582, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.076195} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.568181] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62914) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1394}} [ 698.569086] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbb7849-33d1-4d96-9ac3-011b8a56b636 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.595650] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b/ephemeral_0.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 698.597054] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a5f0ccb-58eb-4b1a-8413-ce0c820d98ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.620067] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 698.620067] env[62914]: value = "task-4831584" [ 698.620067] env[62914]: _type = "Task" [ 698.620067] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.631609] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831584, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.634108] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642d7b6e-6368-45d0-b1eb-56f479704539 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.642279] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4713ad84-5016-4c33-a670-c36cf9739bf9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.675066] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff656050-1b47-4a64-b89e-583d9c004d9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.684691] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a337894c-4216-474b-a120-a2f063594c55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.699455] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.977765] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831583, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.028364] env[62914]: DEBUG nova.network.neutron [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.131762] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831584, 'name': ReconfigVM_Task, 'duration_secs': 0.41443} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.131762] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b/ephemeral_0.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.132133] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1544e3f-5362-4084-b3d4-aa005a168d89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.139209] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 699.139209] env[62914]: value = "task-4831585" [ 699.139209] env[62914]: _type = "Task" [ 699.139209] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.148138] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831585, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.205921] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 699.479509] env[62914]: DEBUG oslo_vmware.api [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831583, 'name': RemoveSnapshot_Task, 'duration_secs': 0.762892} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.479509] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 699.479701] env[62914]: INFO nova.compute.manager [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 14.18 seconds to snapshot the instance on the hypervisor. 
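The repeated "Waiting for the task ... progress is N% ... completed successfully" entries in this stretch all come from the same polling pattern: submit a vCenter task (CopyVirtualDisk, ExtendVirtualDisk, Destroy, RemoveSnapshot, ...), then poll its state until it reaches a terminal state, logging progress and the final duration. The sketch below is a minimal, stdlib-only illustration of that loop, not the oslo.vmware implementation; TaskInfo, fetch_task_info and POLL_INTERVAL are made-up names for this example.

    # Illustrative sketch of the task-polling pattern recorded above.
    # NOT oslo.vmware code: TaskInfo / fetch_task_info / POLL_INTERVAL
    # are hypothetical stand-ins for this example only.
    import time
    from dataclasses import dataclass

    POLL_INTERVAL = 0.5  # seconds between poll round trips

    @dataclass
    class TaskInfo:
        task_id: str          # e.g. "task-4831583"
        state: str            # "queued" | "running" | "success" | "error"
        progress: int         # 0-100, as echoed by the progress lines
        error: str | None = None

    def wait_for_task(fetch_task_info, task_id):
        """Poll a vCenter-style task until it finishes, logging progress
        the way the entries above do."""
        started = time.monotonic()
        while True:
            info = fetch_task_info(task_id)   # one server round trip per loop
            if info.state == "success":
                duration = time.monotonic() - started
                print(f"Task {task_id} completed successfully in {duration:.3f}s")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(POLL_INTERVAL)

In the log, each poll round trip shows up as a PropertyCollector.RetrievePropertiesEx invocation followed by a _poll_task progress line, and the completed entry carries the measured duration_secs.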
[ 699.531538] env[62914]: INFO nova.compute.manager [-] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Took 1.03 seconds to deallocate network for instance. [ 699.649215] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831585, 'name': Rename_Task, 'duration_secs': 0.207768} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.651521] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 699.651521] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff950b69-4966-415e-b4af-e3ac08bc967e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.656808] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 699.656808] env[62914]: value = "task-4831586" [ 699.656808] env[62914]: _type = "Task" [ 699.656808] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.666204] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831586, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.711749] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 699.712101] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.252s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.712357] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.016s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.714215] env[62914]: INFO nova.compute.claims [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.036988] env[62914]: DEBUG nova.compute.manager [None req-fb9d9416-ca79-42f7-ae7e-68dff19482a8 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Found 1 images (rotation: 2) {{(pid=62914) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 700.038797] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.167179] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831586, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.671292] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831586, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.729395] env[62914]: DEBUG nova.compute.manager [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 700.733549] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b6a148-bf78-4452-ac3a-dbb34426b962 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.162590] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bdaa19-190e-4207-bd17-cd4188021a9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.173015] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9680ce96-7c09-414e-86f4-9fca3ba8c11a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.177084] env[62914]: DEBUG oslo_vmware.api [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831586, 'name': PowerOnVM_Task, 'duration_secs': 1.200163} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.177386] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 701.177640] env[62914]: INFO nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Took 10.45 seconds to spawn the instance on the hypervisor. 
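The lockutils entries nearby (Acquiring lock "compute_resources" ..., acquired :: waited 31.016s, "released" :: held 8.252s) follow a consistent shape: announce the attempt, record how long the caller waited to obtain the lock, then record how long it was held. A rough stdlib sketch of that bookkeeping is shown below, assuming a simple in-process registry of named threading locks; the real helper in oslo_concurrency.lockutils also supports external/file locks and differs in detail.

    # Illustrative sketch only; not oslo_concurrency.lockutils.
    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}   # per-name lock registry (sketch)

    @contextmanager
    def timed_lock(name: str, caller: str):
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')

    # Usage, mirroring the resource-tracker entries in this section:
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...claim resources for the instance...

The long "waited" times in the log are simply contention on the single per-host "compute_resources" lock shared by the resource tracker's claim and update paths.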
[ 701.177883] env[62914]: DEBUG nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 701.178934] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f783dbeb-4f92-4896-a90d-ddf1318b2c6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.212261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516592fc-a9de-4900-8808-f9e7f503b416 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.226793] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5948ce02-f312-49ba-85ea-d11584bcb423 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.240855] env[62914]: DEBUG nova.compute.provider_tree [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.246289] env[62914]: INFO nova.compute.manager [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] instance snapshotting [ 701.246289] env[62914]: DEBUG nova.objects.instance [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'flavor' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.731013] env[62914]: INFO nova.compute.manager [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Took 36.88 seconds to build instance. 
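The scheduler report client's "Inventory has not changed for provider ..." entries carry per-resource-class records with total, reserved, min_unit/max_unit, step_size and allocation_ratio. As a back-of-the-envelope illustration, Placement's usual capacity convention is (total - reserved) * allocation_ratio, which for the inventory logged here works out to 192 schedulable VCPUs, 196078 MB of RAM and 200 GB of disk. The snippet below is only a sketch of that arithmetic on the logged values, not Nova or Placement code, and it ignores the min_unit/max_unit/step_size constraints that bound individual allocations.

    # Sketch: schedulable capacity from an inventory record like the one
    # logged by the report client in this section (illustrative only).
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(rec: dict) -> int:
        # Assumed convention: (total - reserved) scaled by the allocation ratio.
        return int((rec["total"] - rec["reserved"]) * rec["allocation_ratio"])

    for rc, rec in inventory.items():
        print(f"{rc}: {capacity(rec)} schedulable units")
    # Expected output: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200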
[ 701.744394] env[62914]: DEBUG nova.scheduler.client.report [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.753111] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5924f7fd-5174-4672-9134-9a34e64e6956 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.774788] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22ca886-d78d-48a0-ae05-f7de7d85a9ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.233619] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f6e2318b-ef47-49a3-9672-5d71c95e418c tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.399s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.252817] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.253375] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 702.258667] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.063s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.258667] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.258667] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.987s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.260262] env[62914]: INFO nova.compute.claims [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.287896] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 702.288208] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-43f0d6c2-99b3-4860-8178-e3be754fdd56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.299287] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 702.299287] env[62914]: value = "task-4831587" [ 702.299287] env[62914]: _type = "Task" [ 702.299287] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.304046] env[62914]: INFO nova.scheduler.client.report [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Deleted allocations for instance a4fca617-da38-4913-b2c8-a2921da6db56 [ 702.314898] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831587, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.736209] env[62914]: DEBUG nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 702.762377] env[62914]: DEBUG nova.compute.utils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 702.762377] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 702.762377] env[62914]: DEBUG nova.network.neutron [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 702.812663] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831587, 'name': CreateSnapshot_Task, 'duration_secs': 0.456762} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.812983] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 702.817020] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2547f53-9c14-49cb-83f4-0a2e6da1cfd0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.817452] env[62914]: DEBUG oslo_concurrency.lockutils [None req-156484d4-7265-461e-afa9-8cbb5d19568b tempest-ImagesOneServerTestJSON-1204860044 tempest-ImagesOneServerTestJSON-1204860044-project-member] Lock "a4fca617-da38-4913-b2c8-a2921da6db56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.768s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.843416] env[62914]: DEBUG nova.policy [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e219264425c440f942a5b4f8d2bbede', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a787e2eca26741aa951dc2ef5c507766', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 703.067744] env[62914]: DEBUG nova.compute.manager [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Received event network-changed-ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 703.067947] env[62914]: DEBUG nova.compute.manager [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Refreshing instance network info cache due to event network-changed-ba331d08-bcc1-4f3e-b972-ee660dd6c0db. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 703.068188] env[62914]: DEBUG oslo_concurrency.lockutils [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] Acquiring lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.068400] env[62914]: DEBUG oslo_concurrency.lockutils [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] Acquired lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.068499] env[62914]: DEBUG nova.network.neutron [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Refreshing network info cache for port ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 703.268829] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 703.277174] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.339037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 703.339037] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-21cf5334-48ec-41ac-92cd-986a5f806328 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.348735] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 703.348735] env[62914]: value = "task-4831588" [ 703.348735] env[62914]: _type = "Task" [ 703.348735] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.360199] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831588, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.597336] env[62914]: DEBUG nova.network.neutron [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Successfully created port: df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.856507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6397cc30-8673-4c70-9c32-b35741ce0c9b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.868771] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831588, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.872235] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd54caa9-7cce-4743-b0c2-45e13390bc51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.916561] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dceba6d7-47ee-492e-a0b6-7bea26797ad1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.925715] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fba8e5-2f26-4c49-9d2d-12a2a80833ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.942204] env[62914]: DEBUG nova.compute.provider_tree [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.301721] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 704.341038] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.341038] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.341038] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.341038] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.341305] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.341436] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.341650] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.341814] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.341983] env[62914]: DEBUG nova.virt.hardware [None 
req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.342593] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.342593] env[62914]: DEBUG nova.virt.hardware [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.343302] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1941388-1b6c-4a2d-8b37-411e8e2b6ebd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.360870] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68770821-08c6-4eab-a00d-3dc9227840c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.372407] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831588, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.445280] env[62914]: DEBUG nova.scheduler.client.report [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.507020] env[62914]: DEBUG nova.network.neutron [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Updated VIF entry in instance network info cache for port ba331d08-bcc1-4f3e-b972-ee660dd6c0db. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 704.507020] env[62914]: DEBUG nova.network.neutron [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Updating instance_info_cache with network_info: [{"id": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "address": "fa:16:3e:21:57:7d", "network": {"id": "81fe075d-b8de-4a39-a087-f092a63bf0f4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1335370714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4026bc0aca7941a79d5e71bb1a7df1f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba331d08-bc", "ovs_interfaceid": "ba331d08-bcc1-4f3e-b972-ee660dd6c0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.866370] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831588, 'name': CloneVM_Task, 'duration_secs': 1.3257} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.866848] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created linked-clone VM from snapshot [ 704.868334] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfead10a-2837-419f-9beb-f303ba4d4d9f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.885106] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Uploading image f83a00c5-f071-49fa-b609-63dfc772cd21 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 704.911606] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 704.911606] env[62914]: value = "vm-941877" [ 704.911606] env[62914]: _type = "VirtualMachine" [ 704.911606] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 704.911983] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c6d2ec63-8759-41ce-a307-56c248d309b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.921398] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease: (returnval){ [ 704.921398] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52216248-6fef-f47b-f88f-0fd5d9b07bd3" [ 704.921398] env[62914]: _type = "HttpNfcLease" [ 704.921398] env[62914]: } obtained for exporting VM: (result){ [ 704.921398] env[62914]: value = "vm-941877" [ 704.921398] env[62914]: _type = "VirtualMachine" [ 704.921398] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 704.921873] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the lease: (returnval){ [ 704.921873] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52216248-6fef-f47b-f88f-0fd5d9b07bd3" [ 704.921873] env[62914]: _type = "HttpNfcLease" [ 704.921873] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 704.931203] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 704.931203] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52216248-6fef-f47b-f88f-0fd5d9b07bd3" [ 704.931203] env[62914]: _type = "HttpNfcLease" [ 704.931203] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 704.952033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.952033] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 704.956394] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.749s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.956394] env[62914]: INFO nova.compute.claims [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.008542] env[62914]: DEBUG oslo_concurrency.lockutils [req-f8715c45-84a2-4b15-a09d-814896047ab2 req-3fd3570e-d3b1-485c-b3b6-609f1325c8fd service nova] Releasing lock "refresh_cache-54185b06-7ccb-4740-a6ee-213bbfa6365b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.391631] env[62914]: DEBUG nova.compute.manager [req-12c888da-da78-4807-9990-0035550ad19d req-90357998-db07-4fd0-b08a-0607a258a026 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Received event network-vif-plugged-df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 705.392346] env[62914]: DEBUG oslo_concurrency.lockutils [req-12c888da-da78-4807-9990-0035550ad19d req-90357998-db07-4fd0-b08a-0607a258a026 service nova] Acquiring lock "fed831e0-4518-4025-89b1-7f6b644e013d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.392346] env[62914]: DEBUG oslo_concurrency.lockutils [req-12c888da-da78-4807-9990-0035550ad19d req-90357998-db07-4fd0-b08a-0607a258a026 service nova] Lock "fed831e0-4518-4025-89b1-7f6b644e013d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.392346] env[62914]: DEBUG oslo_concurrency.lockutils [req-12c888da-da78-4807-9990-0035550ad19d req-90357998-db07-4fd0-b08a-0607a258a026 service nova] Lock "fed831e0-4518-4025-89b1-7f6b644e013d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.392569] env[62914]: DEBUG nova.compute.manager [req-12c888da-da78-4807-9990-0035550ad19d req-90357998-db07-4fd0-b08a-0607a258a026 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] No waiting events found dispatching network-vif-plugged-df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 705.392653] env[62914]: WARNING nova.compute.manager [req-12c888da-da78-4807-9990-0035550ad19d req-90357998-db07-4fd0-b08a-0607a258a026 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Received unexpected event network-vif-plugged-df40a274-9dea-4b4c-be39-6e7556e77032 for instance with vm_state building and task_state spawning. 
[ 705.437068] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 705.437068] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52216248-6fef-f47b-f88f-0fd5d9b07bd3" [ 705.437068] env[62914]: _type = "HttpNfcLease" [ 705.437068] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 705.437433] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 705.437433] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52216248-6fef-f47b-f88f-0fd5d9b07bd3" [ 705.437433] env[62914]: _type = "HttpNfcLease" [ 705.437433] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 705.438288] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f7d6cc-a39f-454f-ba0c-08a9dea4148b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.451585] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3e9f3-7c1f-03bb-8d62-9157afbc463f/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 705.454472] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3e9f3-7c1f-03bb-8d62-9157afbc463f/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 705.461391] env[62914]: DEBUG nova.compute.utils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 705.467667] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 705.468127] env[62914]: DEBUG nova.network.neutron [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 705.563284] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d903ee68-3a05-4a85-8494-13291bbbccde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.593084] env[62914]: DEBUG nova.network.neutron [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Successfully updated port: df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.601813] env[62914]: DEBUG nova.policy [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5da18e2dc49746d8a7125efdc106d62b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd271710592bf47b79e16552221fe7107', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.968176] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 706.096982] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.096982] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquired lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.096982] env[62914]: DEBUG nova.network.neutron [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.181741] env[62914]: DEBUG nova.network.neutron [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Successfully created port: 6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.543920] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9b835f-d3c2-4e81-b347-8f7c39ac959f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.552889] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bebd1b8-6fb9-4b0b-8d11-8b6acf79daae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.589123] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3c6ae0-0acd-4d38-9ac5-7f3818e37d36 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.598598] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850a0afb-818b-408e-9790-1ad294ac53bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.618326] env[62914]: DEBUG nova.compute.provider_tree [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.655629] env[62914]: DEBUG nova.network.neutron [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.854038] env[62914]: DEBUG nova.network.neutron [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Updating instance_info_cache with network_info: [{"id": "df40a274-9dea-4b4c-be39-6e7556e77032", "address": "fa:16:3e:b0:58:25", "network": {"id": "f81a07e3-8a2d-4ae8-afdd-0fcf0588d2d0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-312614933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a787e2eca26741aa951dc2ef5c507766", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf40a274-9d", "ovs_interfaceid": "df40a274-9dea-4b4c-be39-6e7556e77032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.983524] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 707.013680] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 707.014429] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 707.014607] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.014799] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 707.014950] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.015126] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 707.015345] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 707.015545] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 707.016051] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 707.016334] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 707.016563] env[62914]: DEBUG nova.virt.hardware [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.017530] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49709ce-5a63-440d-b1ad-afc9060de113 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.029086] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6780c13-c368-437a-9355-0f739401c5e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.122298] env[62914]: DEBUG nova.scheduler.client.report [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 707.361057] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Releasing lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.361444] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Instance network_info: |[{"id": "df40a274-9dea-4b4c-be39-6e7556e77032", "address": "fa:16:3e:b0:58:25", "network": {"id": "f81a07e3-8a2d-4ae8-afdd-0fcf0588d2d0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-312614933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a787e2eca26741aa951dc2ef5c507766", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": 
"nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf40a274-9d", "ovs_interfaceid": "df40a274-9dea-4b4c-be39-6e7556e77032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 707.361904] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:58:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df40a274-9dea-4b4c-be39-6e7556e77032', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.370859] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Creating folder: Project (a787e2eca26741aa951dc2ef5c507766). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.371819] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7faf53af-5a3d-4acd-ae61-316fd6a2c23e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.386845] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Created folder: Project (a787e2eca26741aa951dc2ef5c507766) in parent group-v941773. [ 707.387160] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Creating folder: Instances. Parent ref: group-v941878. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.387336] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab9316db-3b79-4ab0-9814-457756ddfdaa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.400354] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Created folder: Instances in parent group-v941878. [ 707.402169] env[62914]: DEBUG oslo.service.loopingcall [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.402169] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 707.402169] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bbdf13b-f7d1-4365-92c6-2e0ffc2384b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.427050] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.427050] env[62914]: value = "task-4831592" [ 707.427050] env[62914]: _type = "Task" [ 707.427050] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.434043] env[62914]: DEBUG nova.compute.manager [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Received event network-changed-df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 707.434321] env[62914]: DEBUG nova.compute.manager [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Refreshing instance network info cache due to event network-changed-df40a274-9dea-4b4c-be39-6e7556e77032. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 707.434647] env[62914]: DEBUG oslo_concurrency.lockutils [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] Acquiring lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.434800] env[62914]: DEBUG oslo_concurrency.lockutils [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] Acquired lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.435081] env[62914]: DEBUG nova.network.neutron [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Refreshing network info cache for port df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 707.447953] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831592, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.628901] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.673s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.628901] env[62914]: DEBUG nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 707.631231] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.430s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.632785] env[62914]: INFO nova.compute.claims [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.948803] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831592, 'name': CreateVM_Task, 'duration_secs': 0.385747} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.949182] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 707.951858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.951858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.951858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 707.951858] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35bc63d2-5cb5-42da-8e3d-331ca8d116e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.958459] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 707.958459] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521577a6-1f5a-050e-8001-8bcf1a086332" [ 707.958459] env[62914]: _type = "Task" [ 707.958459] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.974680] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521577a6-1f5a-050e-8001-8bcf1a086332, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.144050] env[62914]: DEBUG nova.compute.utils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.144050] env[62914]: DEBUG nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Not allocating networking since 'none' was specified. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 708.176432] env[62914]: DEBUG nova.compute.manager [req-d78dd8bf-f199-4e7a-b561-a1b16d9115b5 req-620c5fba-0bc4-40ee-b682-1f91bb34e5f4 service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Received event network-vif-plugged-6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 708.176830] env[62914]: DEBUG oslo_concurrency.lockutils [req-d78dd8bf-f199-4e7a-b561-a1b16d9115b5 req-620c5fba-0bc4-40ee-b682-1f91bb34e5f4 service nova] Acquiring lock "12aa02f0-a232-427a-80ba-1faa12c4d43a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.176905] env[62914]: DEBUG oslo_concurrency.lockutils [req-d78dd8bf-f199-4e7a-b561-a1b16d9115b5 req-620c5fba-0bc4-40ee-b682-1f91bb34e5f4 service nova] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.177180] env[62914]: DEBUG oslo_concurrency.lockutils [req-d78dd8bf-f199-4e7a-b561-a1b16d9115b5 req-620c5fba-0bc4-40ee-b682-1f91bb34e5f4 service nova] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.177389] env[62914]: DEBUG nova.compute.manager [req-d78dd8bf-f199-4e7a-b561-a1b16d9115b5 req-620c5fba-0bc4-40ee-b682-1f91bb34e5f4 service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] No waiting events found dispatching network-vif-plugged-6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 708.177694] env[62914]: WARNING nova.compute.manager [req-d78dd8bf-f199-4e7a-b561-a1b16d9115b5 req-620c5fba-0bc4-40ee-b682-1f91bb34e5f4 service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Received unexpected event network-vif-plugged-6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 for instance with vm_state building and task_state spawning. 
[ 708.180789] env[62914]: DEBUG nova.network.neutron [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Successfully updated port: 6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 708.185278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "82aab17d-a6d0-48cf-a59a-fbef7d402894" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.185278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.185278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "82aab17d-a6d0-48cf-a59a-fbef7d402894-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.185278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.185278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.187968] env[62914]: INFO nova.compute.manager [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Terminating instance [ 708.189552] env[62914]: DEBUG nova.compute.manager [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 708.189717] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 708.190715] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb25664-c2c8-4a35-8e5b-a74a3ef21607 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.209126] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 708.209589] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82a435f3-7450-4c5a-a64b-2956c9dfe3df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.218356] env[62914]: DEBUG oslo_vmware.api [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 708.218356] env[62914]: value = "task-4831593" [ 708.218356] env[62914]: _type = "Task" [ 708.218356] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.229638] env[62914]: DEBUG oslo_vmware.api [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831593, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.399218] env[62914]: DEBUG nova.network.neutron [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Updated VIF entry in instance network info cache for port df40a274-9dea-4b4c-be39-6e7556e77032. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 708.399664] env[62914]: DEBUG nova.network.neutron [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Updating instance_info_cache with network_info: [{"id": "df40a274-9dea-4b4c-be39-6e7556e77032", "address": "fa:16:3e:b0:58:25", "network": {"id": "f81a07e3-8a2d-4ae8-afdd-0fcf0588d2d0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-312614933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a787e2eca26741aa951dc2ef5c507766", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf40a274-9d", "ovs_interfaceid": "df40a274-9dea-4b4c-be39-6e7556e77032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.473875] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521577a6-1f5a-050e-8001-8bcf1a086332, 'name': SearchDatastore_Task, 'duration_secs': 0.014675} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.475020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.479024] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.479024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.479024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.479024] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.479024] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c6054d9-99ef-4d37-8d2b-c6365fae5516 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.489256] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.489450] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 708.490576] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9be68bb-50fd-49c3-b061-2c7392724cfd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.500609] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 708.500609] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5226591d-3978-2254-aff2-14bacb8a7f5f" [ 708.500609] env[62914]: _type = "Task" [ 708.500609] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.515220] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5226591d-3978-2254-aff2-14bacb8a7f5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.650944] env[62914]: DEBUG nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 708.685018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "refresh_cache-12aa02f0-a232-427a-80ba-1faa12c4d43a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.685073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "refresh_cache-12aa02f0-a232-427a-80ba-1faa12c4d43a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.685314] env[62914]: DEBUG nova.network.neutron [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 708.731338] env[62914]: DEBUG oslo_vmware.api [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831593, 'name': PowerOffVM_Task, 'duration_secs': 0.315661} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.731984] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 708.731984] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 708.732135] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb86a0fb-0034-4abf-b374-6ed718573825 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.814088] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 708.814250] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 708.814395] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Deleting the datastore file [datastore1] 82aab17d-a6d0-48cf-a59a-fbef7d402894 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 708.814704] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c94e0a03-f99e-422e-b5e9-99c1cc7ed57d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.823530] env[62914]: DEBUG oslo_vmware.api [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for the task: (returnval){ [ 708.823530] env[62914]: value = "task-4831595" [ 708.823530] env[62914]: _type = "Task" [ 708.823530] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.834166] env[62914]: DEBUG oslo_vmware.api [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831595, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.905791] env[62914]: DEBUG oslo_concurrency.lockutils [req-a679a07f-5235-42b8-af82-0a0d50a0b623 req-0f1997ac-a686-4ae3-8339-bb3378c4a966 service nova] Releasing lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.015975] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5226591d-3978-2254-aff2-14bacb8a7f5f, 'name': SearchDatastore_Task, 'duration_secs': 0.015626} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.019217] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96427626-98d0-4c7b-ae2d-370d65defb28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.029110] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 709.029110] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f8ae63-2449-6e18-e52b-a9737d22861b" [ 709.029110] env[62914]: _type = "Task" [ 709.029110] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.044041] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f8ae63-2449-6e18-e52b-a9737d22861b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.254957] env[62914]: DEBUG nova.network.neutron [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.338708] env[62914]: DEBUG oslo_vmware.api [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Task: {'id': task-4831595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406263} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.342527] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 709.342785] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 709.342975] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 709.343175] env[62914]: INFO nova.compute.manager [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Took 1.15 seconds to destroy the instance on the hypervisor. [ 709.343465] env[62914]: DEBUG oslo.service.loopingcall [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 709.343679] env[62914]: DEBUG nova.compute.manager [-] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 709.343771] env[62914]: DEBUG nova.network.neutron [-] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 709.417247] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257260a6-a792-4e5e-a83f-c487d63caafe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.426659] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e758ceb-b6eb-446c-ba60-4f18d21c6589 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.466564] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd4a4c7-2a12-4d02-b4db-005750acf507 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.480021] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60281d05-2bb2-47c0-9305-5f2988640176 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.497613] env[62914]: DEBUG nova.compute.provider_tree [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.548896] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f8ae63-2449-6e18-e52b-a9737d22861b, 'name': SearchDatastore_Task, 'duration_secs': 0.026917} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.548953] env[62914]: DEBUG nova.network.neutron [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Updating instance_info_cache with network_info: [{"id": "6faadfd4-7bac-4fbc-a32f-f2742bdff1f6", "address": "fa:16:3e:61:28:0f", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6faadfd4-7b", "ovs_interfaceid": "6faadfd4-7bac-4fbc-a32f-f2742bdff1f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.550328] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.550636] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] fed831e0-4518-4025-89b1-7f6b644e013d/fed831e0-4518-4025-89b1-7f6b644e013d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 709.550915] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccfbc1be-a169-4731-8f4e-c461dc82538a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.560151] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 709.560151] env[62914]: value = "task-4831596" [ 709.560151] env[62914]: _type = "Task" [ 709.560151] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.569782] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831596, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.664649] env[62914]: DEBUG nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 709.702202] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.702202] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.702202] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.702202] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.702872] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.702872] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.703133] env[62914]: DEBUG nova.virt.hardware [None 
req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.703248] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 709.703736] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.703960] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.704198] env[62914]: DEBUG nova.virt.hardware [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.709237] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f092c071-1e80-45ce-8d3b-ecd195faa1fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.715029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df39c64-dbd4-4e04-839e-f911f3c49dff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.733447] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 709.741027] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Creating folder: Project (8eeb6bcb893c4c41ab1919a4ba4e66df). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 709.741567] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60b92a5e-f1c2-4130-8d9a-626cf9c97d07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.755048] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Created folder: Project (8eeb6bcb893c4c41ab1919a4ba4e66df) in parent group-v941773. 
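Note on the call pattern recorded here: the Folder.CreateFolder invocations above and the Folder.CreateVM_Task / "Waiting for the task" polling that follows are driven through oslo.vmware's session layer. The sketch below is a minimal illustration of that pattern, not Nova's actual vm_util code; the host, credentials, folder name and the parent_folder_ref/config_spec/pool_ref arguments are placeholders assumed for the example.

    # Sketch only: invoke vSphere methods via an oslo.vmware session and poll
    # the returned Task, mirroring the "Invoking Folder.CreateFolder" /
    # "Invoking Folder.CreateVM_Task" / "Waiting for the task" lines in the log.
    from oslo_vmware import api as vmware_api

    def create_folder_and_vm(parent_folder_ref, config_spec, pool_ref):
        # Placeholder endpoint and credentials; a real deployment reads these
        # from the [vmware] section of nova.conf.
        session = vmware_api.VMwareAPISession(
            'vcenter.example.test', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

        # Non-task method: returns the new folder's managed object reference.
        instances_folder = session.invoke_api(
            session.vim, 'CreateFolder', parent_folder_ref, name='Instances')

        # Task-returning method: wait_for_task() polls it, producing the
        # "progress is N%" and "completed successfully" log lines.
        task_ref = session.invoke_api(
            session.vim, 'CreateVM_Task', instances_folder,
            config=config_spec, pool=pool_ref)
        return session.wait_for_task(task_ref)
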
[ 709.755048] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Creating folder: Instances. Parent ref: group-v941881. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 709.755252] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0beaf93-8c8a-4192-98b3-59320a1b942d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.768886] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Created folder: Instances in parent group-v941881. [ 709.769303] env[62914]: DEBUG oslo.service.loopingcall [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 709.769540] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 709.769815] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46873c5d-1d0f-4604-8683-30b66db863bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.788631] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.788631] env[62914]: value = "task-4831599" [ 709.788631] env[62914]: _type = "Task" [ 709.788631] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.800410] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831599, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.001731] env[62914]: DEBUG nova.scheduler.client.report [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.057030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "refresh_cache-12aa02f0-a232-427a-80ba-1faa12c4d43a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.057502] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Instance network_info: |[{"id": "6faadfd4-7bac-4fbc-a32f-f2742bdff1f6", "address": "fa:16:3e:61:28:0f", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6faadfd4-7b", "ovs_interfaceid": "6faadfd4-7bac-4fbc-a32f-f2742bdff1f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 710.057735] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:28:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6faadfd4-7bac-4fbc-a32f-f2742bdff1f6', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.072563] env[62914]: DEBUG oslo.service.loopingcall [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.073791] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 710.077335] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30b17082-b333-4406-811a-135fdfe39f39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.104943] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831596, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.106954] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.106954] env[62914]: value = "task-4831600" [ 710.106954] env[62914]: _type = "Task" [ 710.106954] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.307290] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831599, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.511082] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.880s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.511672] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 710.515359] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.292s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.515595] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.517731] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.593s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.517931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.519982] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.258s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.523369] env[62914]: INFO nova.compute.claims [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.558461] env[62914]: INFO nova.scheduler.client.report [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Deleted allocations for instance 1ddb6508-d8fb-4ead-bcb0-370c19bb287d [ 710.566682] env[62914]: INFO nova.scheduler.client.report [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocations for instance 70a6d3e7-6928-47a7-9f7f-bd5dad64912f [ 710.586660] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665119} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.587064] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] fed831e0-4518-4025-89b1-7f6b644e013d/fed831e0-4518-4025-89b1-7f6b644e013d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 710.587886] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.588261] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c38cfc8a-aa18-4c8b-89fe-738af0e296f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.604433] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 710.604433] env[62914]: value = "task-4831601" [ 710.604433] env[62914]: _type = "Task" [ 710.604433] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.620699] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.632450] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831600, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.745801] env[62914]: DEBUG nova.compute.manager [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Received event network-changed-6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 710.746151] env[62914]: DEBUG nova.compute.manager [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Refreshing instance network info cache due to event network-changed-6faadfd4-7bac-4fbc-a32f-f2742bdff1f6. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 710.746612] env[62914]: DEBUG oslo_concurrency.lockutils [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] Acquiring lock "refresh_cache-12aa02f0-a232-427a-80ba-1faa12c4d43a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.746612] env[62914]: DEBUG oslo_concurrency.lockutils [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] Acquired lock "refresh_cache-12aa02f0-a232-427a-80ba-1faa12c4d43a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.746612] env[62914]: DEBUG nova.network.neutron [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Refreshing network info cache for port 6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 710.803465] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831599, 'name': CreateVM_Task, 'duration_secs': 0.77661} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.804145] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 710.804145] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.804489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.804610] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 710.805024] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9444db2b-5b97-4102-85a9-7fd79d85ab72 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.812482] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 710.812482] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52466656-ad14-f900-4b43-325e0844e21d" [ 710.812482] env[62914]: _type = "Task" [ 710.812482] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.822931] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52466656-ad14-f900-4b43-325e0844e21d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.020335] env[62914]: DEBUG nova.compute.utils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.022949] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 711.022949] env[62914]: DEBUG nova.network.neutron [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 711.067697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-72e9c983-5bb9-47d4-8838-a21284ceeb4d tempest-TenantUsagesTestJSON-824276567 tempest-TenantUsagesTestJSON-824276567-project-member] Lock "1ddb6508-d8fb-4ead-bcb0-370c19bb287d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.503s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.074023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aeeb57b1-c6b7-487f-843a-eaa28fb14284 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "70a6d3e7-6928-47a7-9f7f-bd5dad64912f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 37.503s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.122288] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120951} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.126438] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.127202] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831600, 'name': CreateVM_Task, 'duration_secs': 0.651508} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.127994] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57760702-dadf-42d5-8ef2-7c162b8b2f50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.130840] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 711.131547] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.158776] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] fed831e0-4518-4025-89b1-7f6b644e013d/fed831e0-4518-4025-89b1-7f6b644e013d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.161070] env[62914]: DEBUG nova.policy [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e7fc206df6e49e49be0cc2a2b16212b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9eecd9c2bf4a449e9c31baecc40fe4a0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 711.162952] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b81f3d16-764c-45fb-97e7-d2f45f48a8ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.186802] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 711.186802] env[62914]: value = "task-4831602" [ 711.186802] env[62914]: _type = "Task" [ 711.186802] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.196236] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831602, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.327361] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52466656-ad14-f900-4b43-325e0844e21d, 'name': SearchDatastore_Task, 'duration_secs': 0.018755} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.327361] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.327876] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.332024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.332024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.332024] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.332024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.332024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 711.332024] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-125239ad-4a1e-4fe4-9ad0-d1320eef3fc4 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.332368] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e56e37f0-2ddd-4e86-8820-6a00f22a775a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.339375] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 711.339375] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5244c09f-086e-f598-fffc-d58a7be2bfc8" [ 711.339375] env[62914]: _type = "Task" [ 711.339375] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.345277] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.345949] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 711.348811] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9be7247c-95e2-4c5c-970c-1a0ce4782caa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.361442] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5244c09f-086e-f598-fffc-d58a7be2bfc8, 'name': SearchDatastore_Task, 'duration_secs': 0.012144} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.362177] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.362496] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.366025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.366400] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 711.366400] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522920f8-a191-4215-9aed-23e31ac1393e" [ 711.366400] env[62914]: _type = "Task" [ 711.366400] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.381243] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522920f8-a191-4215-9aed-23e31ac1393e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.427522] env[62914]: DEBUG nova.network.neutron [-] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.526850] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 711.704833] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831602, 'name': ReconfigVM_Task, 'duration_secs': 0.481708} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.705524] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Reconfigured VM instance instance-00000021 to attach disk [datastore2] fed831e0-4518-4025-89b1-7f6b644e013d/fed831e0-4518-4025-89b1-7f6b644e013d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.707383] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb5e2a6b-cbdf-4c0f-a1f0-cb58ce21c7ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.718147] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 711.718147] env[62914]: value = "task-4831603" [ 711.718147] env[62914]: _type = "Task" [ 711.718147] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.739763] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831603, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.741166] env[62914]: DEBUG nova.network.neutron [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Updated VIF entry in instance network info cache for port 6faadfd4-7bac-4fbc-a32f-f2742bdff1f6. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 711.741166] env[62914]: DEBUG nova.network.neutron [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Updating instance_info_cache with network_info: [{"id": "6faadfd4-7bac-4fbc-a32f-f2742bdff1f6", "address": "fa:16:3e:61:28:0f", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6faadfd4-7b", "ovs_interfaceid": "6faadfd4-7bac-4fbc-a32f-f2742bdff1f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.877810] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522920f8-a191-4215-9aed-23e31ac1393e, 'name': SearchDatastore_Task, 'duration_secs': 0.011428} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.881500] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8df89b9-ef15-497b-83a6-57ff3ef4622d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.888675] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 711.888675] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bcfbf-519b-0c79-18df-34217512fad9" [ 711.888675] env[62914]: _type = "Task" [ 711.888675] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.901329] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bcfbf-519b-0c79-18df-34217512fad9, 'name': SearchDatastore_Task, 'duration_secs': 0.010663} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.901446] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.901670] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 711.902123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.902255] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.902749] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73c50752-d843-4cce-a673-04106f002ea4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.904497] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4414997d-451e-47fd-b314-955f2847b359 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.918830] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 711.918830] env[62914]: value = "task-4831604" [ 711.918830] env[62914]: _type = "Task" [ 711.918830] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.919051] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.919160] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 711.924030] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef3e581c-7079-47b9-b557-b3634403dd36 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.929300] env[62914]: INFO nova.compute.manager [-] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Took 2.59 seconds to deallocate network for instance. [ 711.936592] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.936959] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 711.936959] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c8b5f-aac5-91a3-c265-375cac71c071" [ 711.936959] env[62914]: _type = "Task" [ 711.936959] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.953852] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c8b5f-aac5-91a3-c265-375cac71c071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.139733] env[62914]: DEBUG nova.network.neutron [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Successfully created port: 13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.145203] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0cce30-74b2-44d6-8d08-fabb7b01ddbe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.161893] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a615862-beee-4820-b10d-1d9deeb713aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.197254] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bd9cee-24db-4234-a8f1-e1dcaf69f97f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.209381] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308ce2a1-8db0-42d4-a91d-f32f65ca515d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.232175] env[62914]: DEBUG nova.compute.provider_tree [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Inventory has not changed in ProviderTree for provider: 
f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.243842] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831603, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.244418] env[62914]: DEBUG oslo_concurrency.lockutils [req-352234fa-137b-488b-9c55-d26cdbee3c88 req-118c44b7-e696-4e59-93e8-aed49f98e1dd service nova] Releasing lock "refresh_cache-12aa02f0-a232-427a-80ba-1faa12c4d43a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.435689] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513073} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.436167] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 712.436324] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.436640] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b4950ea-7719-44bd-9291-fe3b9ca81f84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.445549] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.446079] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 712.446079] env[62914]: value = "task-4831605" [ 712.446079] env[62914]: _type = "Task" [ 712.446079] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.470647] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831605, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.477237] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c8b5f-aac5-91a3-c265-375cac71c071, 'name': SearchDatastore_Task, 'duration_secs': 0.021756} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.477237] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48456370-bbcc-48dd-bcd6-3c235a8247e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.491296] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 712.491296] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5271fd50-6f75-56fe-7d99-930f40fbe83d" [ 712.491296] env[62914]: _type = "Task" [ 712.491296] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.507130] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5271fd50-6f75-56fe-7d99-930f40fbe83d, 'name': SearchDatastore_Task, 'duration_secs': 0.010903} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.507130] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.507130] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 12aa02f0-a232-427a-80ba-1faa12c4d43a/12aa02f0-a232-427a-80ba-1faa12c4d43a.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 712.507130] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a97b2438-4aa8-42fe-b28e-9b8b9d90b685 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.515753] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 712.515753] env[62914]: value = "task-4831606" [ 712.515753] env[62914]: _type = "Task" [ 712.515753] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.528628] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831606, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.548035] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 712.597047] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.597369] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.597548] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.597742] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.597906] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.598162] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.598428] env[62914]: DEBUG nova.virt.hardware [None 
req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.598613] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 712.598975] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.599152] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.599213] env[62914]: DEBUG nova.virt.hardware [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.600214] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd6200f-f504-4e88-a352-bfae8e22fc7e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.609966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78ad4ef-0955-4d9f-9c44-0cba22f18021 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.738899] env[62914]: DEBUG nova.scheduler.client.report [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.756031] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831603, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.971699] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155848} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.971699] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.973278] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91c9eeb-e8ec-4f7d-94fc-9171d87f0705 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.018218] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.019056] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d1c12ae-ce87-48c7-95a5-de3cea8dde7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.058027] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831606, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.058027] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 713.058027] env[62914]: value = "task-4831607" [ 713.058027] env[62914]: _type = "Task" [ 713.058027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.075406] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831607, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.244887] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831603, 'name': Rename_Task, 'duration_secs': 1.195049} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.245219] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 713.245525] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f47d554-6a34-41ba-abc8-6907c021a6f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.248118] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.249605] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 713.252116] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.588s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.252333] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.254513] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.533s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.255972] env[62914]: INFO nova.compute.claims [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.264966] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 713.264966] env[62914]: value = "task-4831608" [ 713.264966] env[62914]: _type = "Task" [ 713.264966] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.275608] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.294332] env[62914]: INFO nova.scheduler.client.report [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Deleted allocations for instance e69c36e9-3c59-48e3-9962-ffe8de10a789 [ 713.324390] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3e9f3-7c1f-03bb-8d62-9157afbc463f/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 713.327331] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b5bb69-2698-42cd-a2b5-4f1308992fff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.332706] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3e9f3-7c1f-03bb-8d62-9157afbc463f/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 713.332879] env[62914]: ERROR oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3e9f3-7c1f-03bb-8d62-9157afbc463f/disk-0.vmdk due to incomplete transfer. [ 713.333207] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6595ba63-cc43-463a-be0a-f451b42c6fa7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.343256] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3e9f3-7c1f-03bb-8d62-9157afbc463f/disk-0.vmdk. 
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 713.343499] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Uploaded image f83a00c5-f071-49fa-b609-63dfc772cd21 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 713.345799] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 713.346091] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-71f3f68c-75af-42eb-b6d4-c5089291ed0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.355500] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 713.355500] env[62914]: value = "task-4831609" [ 713.355500] env[62914]: _type = "Task" [ 713.355500] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.365368] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831609, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.532021] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831606, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684781} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.533044] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 12aa02f0-a232-427a-80ba-1faa12c4d43a/12aa02f0-a232-427a-80ba-1faa12c4d43a.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 713.533044] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.533044] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1f142c4-dfd6-487a-898f-8cf8e039e831 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.542118] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 713.542118] env[62914]: value = "task-4831610" [ 713.542118] env[62914]: _type = "Task" [ 713.542118] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.553798] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831610, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.571140] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831607, 'name': ReconfigVM_Task, 'duration_secs': 0.423277} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.571482] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Reconfigured VM instance instance-00000023 to attach disk [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.572184] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cd8d032-bf3e-4656-ba37-0590ea21b498 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.585075] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 713.585075] env[62914]: value = "task-4831611" [ 713.585075] env[62914]: _type = "Task" [ 713.585075] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.596021] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831611, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.764682] env[62914]: DEBUG nova.compute.utils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.772072] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 713.773176] env[62914]: DEBUG nova.network.neutron [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 713.792110] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831608, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.810687] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6c33eb6-c4da-420f-8098-fc7432db7d67 tempest-ServersTestBootFromVolume-1983048922 tempest-ServersTestBootFromVolume-1983048922-project-member] Lock "e69c36e9-3c59-48e3-9962-ffe8de10a789" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.543s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.840091] env[62914]: DEBUG nova.compute.manager [req-a4995fb9-b9d0-4d34-9576-8834a4a55e88 req-a840f59d-1472-4bf8-aa80-c4594d8707e9 service nova] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Received event network-vif-deleted-eaf21f1d-ed3e-4cf6-ba5e-8213a6dd21bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 713.842136] env[62914]: DEBUG nova.policy [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94e903c202234339bea9f5a04cf1a399', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5214037507114e7eafb35779261bde06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 713.867683] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831609, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.055395] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10953} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.056296] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 714.057388] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b82cadb-602d-408a-bad7-35e074a18a61 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.089748] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 12aa02f0-a232-427a-80ba-1faa12c4d43a/12aa02f0-a232-427a-80ba-1faa12c4d43a.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 714.090178] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b432e93-d311-42b4-8fca-d91960fc2899 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.116359] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831611, 'name': Rename_Task, 'duration_secs': 0.171497} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.118266] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 714.118702] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 714.118702] env[62914]: value = "task-4831612" [ 714.118702] env[62914]: _type = "Task" [ 714.118702] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.118947] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01d7fce9-94b5-4d51-8d6c-adf99fa88ea5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.132609] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831612, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.134345] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 714.134345] env[62914]: value = "task-4831613" [ 714.134345] env[62914]: _type = "Task" [ 714.134345] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.144819] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.193334] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "76dfbf82-0ed0-4621-890c-060b187b47e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.193550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.271956] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 714.288560] env[62914]: DEBUG oslo_vmware.api [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831608, 'name': PowerOnVM_Task, 'duration_secs': 0.855643} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.288920] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 714.289155] env[62914]: INFO nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Took 9.99 seconds to spawn the instance on the hypervisor. 
[ 714.289394] env[62914]: DEBUG nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 714.290242] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3df178a-87bb-4f1e-b16a-56baa8219e48 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.376198] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831609, 'name': Destroy_Task, 'duration_secs': 0.70275} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.376198] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroyed the VM [ 714.376198] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 714.376198] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-68c37160-342c-4249-b642-93ddad15b368 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.386650] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 714.386650] env[62914]: value = "task-4831614" [ 714.386650] env[62914]: _type = "Task" [ 714.386650] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.406957] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831614, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.446231] env[62914]: DEBUG nova.network.neutron [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Successfully created port: 8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.635964] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831612, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.648270] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831613, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.816671] env[62914]: INFO nova.compute.manager [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Took 46.16 seconds to build instance. [ 714.884925] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222485c7-64f0-4cb0-b48c-0447fa7db107 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.899833] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d3354c-26e9-4b0a-8e9a-4ce6417a5346 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.906210] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831614, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.940636] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b375272-c290-4670-aefd-5ce627fd8b13 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.950936] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe49b5a6-7824-41dd-b101-3d5ca584e803 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.967106] env[62914]: DEBUG nova.compute.provider_tree [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.109220] env[62914]: DEBUG nova.network.neutron [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Successfully updated port: 13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.138453] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831612, 'name': ReconfigVM_Task, 'duration_secs': 0.55906} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.148020] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 12aa02f0-a232-427a-80ba-1faa12c4d43a/12aa02f0-a232-427a-80ba-1faa12c4d43a.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.148020] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d14128b9-aee1-4328-9b43-2a9ebc82f8be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.155746] env[62914]: DEBUG oslo_vmware.api [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831613, 'name': PowerOnVM_Task, 'duration_secs': 0.582211} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.157453] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 715.157731] env[62914]: INFO nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Took 5.49 seconds to spawn the instance on the hypervisor. [ 715.157891] env[62914]: DEBUG nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 715.158268] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 715.158268] env[62914]: value = "task-4831615" [ 715.158268] env[62914]: _type = "Task" [ 715.158268] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.159113] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686303fd-c887-44ef-b713-acca7f062393 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.179828] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831615, 'name': Rename_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.286314] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 715.317886] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 715.318226] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 715.318306] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.318494] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None 
req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 715.322019] env[62914]: DEBUG nova.virt.hardware [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.322019] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec72fd9-8feb-4521-a7e6-3bbc6bbf3964 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.323992] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6d1c2586-7872-40f5-8d4b-13f34b7b71e7 tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "fed831e0-4518-4025-89b1-7f6b644e013d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.416s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.331931] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27568cd9-7097-4b46-a0b0-a47c20a9fe4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.403299] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831614, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.470619] env[62914]: DEBUG nova.scheduler.client.report [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.612651] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "refresh_cache-bf2e9634-66ee-4b6a-a148-bc77420d793f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.612651] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquired lock "refresh_cache-bf2e9634-66ee-4b6a-a148-bc77420d793f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.612651] env[62914]: DEBUG nova.network.neutron [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 715.687209] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831615, 'name': Rename_Task, 'duration_secs': 0.252978} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.687209] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 715.687405] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3643cc6-f8bc-4c0e-81f5-1147835f6c3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.699351] env[62914]: INFO nova.compute.manager [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Took 42.52 seconds to build instance. 
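The repeated "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" DEBUG entries above are emitted while the driver blocks in oslo_vmware's wait_for_task, which keeps polling the vCenter task until it reaches a terminal state and then logs "completed successfully". The loop below is only an illustrative sketch of that pattern, not oslo.vmware's implementation; get_task_info is a hypothetical callable standing in for the real session/property-collector machinery.

    import time

    def wait_for_task(task_id, get_task_info, poll_interval=0.5):
        """Illustrative only: poll a task until it finishes, reporting progress
        the way the _poll_task DEBUG lines above do. 'get_task_info' is a
        hypothetical callable returning e.g. {'state': 'running', 'progress': 89}."""
        while True:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError('Task %s failed: %s' % (task_id, info.get('error')))
            # Corresponds to the "progress is N%." DEBUG lines seen while polling.
            print("Task: {'id': %s} progress is %s%%." % (task_id, info.get('progress', 0)))
            time.sleep(poll_interval)
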
[ 715.702195] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 715.702195] env[62914]: value = "task-4831616" [ 715.702195] env[62914]: _type = "Task" [ 715.702195] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.714838] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.830723] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 715.903787] env[62914]: DEBUG oslo_vmware.api [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831614, 'name': RemoveSnapshot_Task, 'duration_secs': 1.17109} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.908466] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 715.910999] env[62914]: INFO nova.compute.manager [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 14.16 seconds to snapshot the instance on the hypervisor. [ 715.978915] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.981562] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 715.985760] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.203s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.985760] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.986285] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.617s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.986490] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.990587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.456s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.992434] env[62914]: INFO nova.compute.claims [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.037846] env[62914]: INFO nova.scheduler.client.report [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Deleted allocations for instance 9ce44ae9-9369-4c0c-9d14-9c8fde42d612 [ 716.039684] env[62914]: INFO nova.scheduler.client.report [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleted allocations for instance d8d08c36-bec2-4117-9352-8e148d25dc9e [ 716.187696] env[62914]: DEBUG nova.network.neutron [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.204255] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69edd90a-7432-4424-8ea0-4a8dd7422b05 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "cead3557-080d-4956-a957-cac449bb69f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.595s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.216607] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831616, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.358220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.478469] env[62914]: DEBUG nova.compute.manager [None req-f8c41f21-1064-4819-a62e-259b8baddc67 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Found 2 images (rotation: 2) {{(pid=62914) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 716.503150] env[62914]: DEBUG nova.compute.utils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 716.507873] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 716.510184] env[62914]: DEBUG nova.network.neutron [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 716.518223] env[62914]: DEBUG nova.compute.manager [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Received event network-vif-plugged-13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 716.518506] env[62914]: DEBUG oslo_concurrency.lockutils [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] Acquiring lock "bf2e9634-66ee-4b6a-a148-bc77420d793f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.518746] env[62914]: DEBUG oslo_concurrency.lockutils [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.518923] env[62914]: DEBUG oslo_concurrency.lockutils [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.519146] env[62914]: DEBUG nova.compute.manager [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] No waiting events found dispatching network-vif-plugged-13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 716.519622] env[62914]: WARNING nova.compute.manager [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Received unexpected event network-vif-plugged-13fa09d1-da09-4a01-bc28-c7fd25efb778 for instance with vm_state building and task_state spawning. [ 716.519622] env[62914]: DEBUG nova.compute.manager [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Received event network-changed-13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 716.519800] env[62914]: DEBUG nova.compute.manager [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Refreshing instance network info cache due to event network-changed-13fa09d1-da09-4a01-bc28-c7fd25efb778. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 716.519903] env[62914]: DEBUG oslo_concurrency.lockutils [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] Acquiring lock "refresh_cache-bf2e9634-66ee-4b6a-a148-bc77420d793f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.551485] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da64fea4-ba65-4dc4-848e-cf2394bc5a02 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "9ce44ae9-9369-4c0c-9d14-9c8fde42d612" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.705s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.552589] env[62914]: DEBUG oslo_concurrency.lockutils [None req-db035b21-cfc1-4352-ae0a-8a04681b0c74 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "d8d08c36-bec2-4117-9352-8e148d25dc9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.449s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.642913] env[62914]: DEBUG nova.network.neutron [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Updating instance_info_cache with network_info: [{"id": "13fa09d1-da09-4a01-bc28-c7fd25efb778", "address": "fa:16:3e:22:7a:1f", "network": {"id": "6ddc27d6-5081-4b69-a277-40eff032c384", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1412777375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9eecd9c2bf4a449e9c31baecc40fe4a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fa09d1-da", "ovs_interfaceid": "13fa09d1-da09-4a01-bc28-c7fd25efb778", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.714936] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 716.732911] env[62914]: DEBUG oslo_vmware.api [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831616, 'name': PowerOnVM_Task, 'duration_secs': 0.947547} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.735183] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 716.735448] env[62914]: INFO nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Took 9.75 seconds to spawn the instance on the hypervisor. [ 716.735643] env[62914]: DEBUG nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 716.736854] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17aa71f6-bc19-4352-b4bc-8166e545435b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.788614] env[62914]: DEBUG nova.policy [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e291489da35649d0a2c69f98714d89ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14ea39ac6e2d400ca89bbffc20d764ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 717.009029] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 717.109297] env[62914]: DEBUG nova.network.neutron [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Successfully updated port: 8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 717.147811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Releasing lock "refresh_cache-bf2e9634-66ee-4b6a-a148-bc77420d793f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.148197] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Instance network_info: |[{"id": "13fa09d1-da09-4a01-bc28-c7fd25efb778", "address": "fa:16:3e:22:7a:1f", "network": {"id": "6ddc27d6-5081-4b69-a277-40eff032c384", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1412777375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9eecd9c2bf4a449e9c31baecc40fe4a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fa09d1-da", "ovs_interfaceid": "13fa09d1-da09-4a01-bc28-c7fd25efb778", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 717.148780] env[62914]: DEBUG oslo_concurrency.lockutils [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] Acquired lock "refresh_cache-bf2e9634-66ee-4b6a-a148-bc77420d793f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.148990] env[62914]: DEBUG nova.network.neutron [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Refreshing network info cache for port 13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 717.150809] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:7a:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '13fa09d1-da09-4a01-bc28-c7fd25efb778', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.162039] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Creating folder: Project (9eecd9c2bf4a449e9c31baecc40fe4a0). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.162039] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9771d5ee-d42a-4392-91d3-13d38cb727ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.173847] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Created folder: Project (9eecd9c2bf4a449e9c31baecc40fe4a0) in parent group-v941773. [ 717.173847] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Creating folder: Instances. Parent ref: group-v941885. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.173939] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67d2f266-1542-4ab0-a431-135d1d71d489 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.185714] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Created folder: Instances in parent group-v941885. [ 717.185900] env[62914]: DEBUG oslo.service.loopingcall [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.186129] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 717.186384] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b91793a2-e025-4d6b-8552-063166fb4186 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.209204] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.209204] env[62914]: value = "task-4831619" [ 717.209204] env[62914]: _type = "Task" [ 717.209204] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.222451] env[62914]: DEBUG nova.network.neutron [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Successfully created port: 48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.225534] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831619, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.228585] env[62914]: INFO nova.compute.manager [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Rebuilding instance [ 717.262797] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.271037] env[62914]: INFO nova.compute.manager [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Took 47.02 seconds to build instance. [ 717.298211] env[62914]: DEBUG nova.compute.manager [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 717.299234] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbd9441-0d20-433d-ac58-6c163e2c9152 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.542206] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6812212b-301a-4b06-85b1-bc6bba14188c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.553249] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9190f53e-a195-455e-a49e-5cea612f1eb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.592225] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94568fea-5edc-4bab-bd2e-d29a8ed23870 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.598094] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "29a177e4-b5d7-4249-8fc5-2316f6891536" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
717.598366] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.604530] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21123f26-e64e-4ac4-89de-012a4825780c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.620217] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.620387] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquired lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.620534] env[62914]: DEBUG nova.network.neutron [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.622869] env[62914]: DEBUG nova.compute.provider_tree [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.725158] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831619, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.781699] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c0c9db5-20d7-49cb-b5fa-752b34111a59 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.027s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.817603] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 717.817603] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-589b6662-0183-4473-afef-da1c9ba8c015 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.826431] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 717.826431] env[62914]: value = "task-4831620" [ 717.826431] env[62914]: _type = "Task" [ 717.826431] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.843367] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.917583] env[62914]: DEBUG nova.network.neutron [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Updated VIF entry in instance network info cache for port 13fa09d1-da09-4a01-bc28-c7fd25efb778. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 717.918683] env[62914]: DEBUG nova.network.neutron [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Updating instance_info_cache with network_info: [{"id": "13fa09d1-da09-4a01-bc28-c7fd25efb778", "address": "fa:16:3e:22:7a:1f", "network": {"id": "6ddc27d6-5081-4b69-a277-40eff032c384", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1412777375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9eecd9c2bf4a449e9c31baecc40fe4a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fa09d1-da", "ovs_interfaceid": "13fa09d1-da09-4a01-bc28-c7fd25efb778", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.936765] env[62914]: DEBUG nova.compute.manager [req-9c66678c-1b4f-476b-bd53-a9c0b531b5a4 req-0ac9b241-564b-458b-a998-e5e189c39bf9 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Received event network-vif-plugged-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 717.938022] env[62914]: DEBUG oslo_concurrency.lockutils [req-9c66678c-1b4f-476b-bd53-a9c0b531b5a4 req-0ac9b241-564b-458b-a998-e5e189c39bf9 service nova] Acquiring lock "4496a977-30b2-4323-a561-884633958cdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.938022] env[62914]: DEBUG oslo_concurrency.lockutils [req-9c66678c-1b4f-476b-bd53-a9c0b531b5a4 req-0ac9b241-564b-458b-a998-e5e189c39bf9 service nova] Lock "4496a977-30b2-4323-a561-884633958cdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.938022] env[62914]: DEBUG oslo_concurrency.lockutils [req-9c66678c-1b4f-476b-bd53-a9c0b531b5a4 req-0ac9b241-564b-458b-a998-e5e189c39bf9 service nova] Lock "4496a977-30b2-4323-a561-884633958cdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.938022] env[62914]: DEBUG nova.compute.manager [req-9c66678c-1b4f-476b-bd53-a9c0b531b5a4 req-0ac9b241-564b-458b-a998-e5e189c39bf9 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] No waiting events found dispatching network-vif-plugged-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 717.938022] env[62914]: WARNING nova.compute.manager [req-9c66678c-1b4f-476b-bd53-a9c0b531b5a4 req-0ac9b241-564b-458b-a998-e5e189c39bf9 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Received unexpected event network-vif-plugged-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 for instance with vm_state building and task_state spawning. [ 718.027184] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 718.054492] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 718.054764] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 718.054925] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.056493] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 718.056678] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.056851] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 718.057094] env[62914]: DEBUG nova.virt.hardware [None 
req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 718.057386] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 718.058417] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 718.058417] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 718.058417] env[62914]: DEBUG nova.virt.hardware [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 718.058912] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2eaf7e8-7010-4b9f-89e4-39fac9ab0afb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.069828] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a26685-1cbe-4e7b-b973-3bf3b1a01417 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.128198] env[62914]: DEBUG nova.scheduler.client.report [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 718.166613] env[62914]: DEBUG nova.network.neutron [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.228483] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831619, 'name': CreateVM_Task, 'duration_secs': 0.714044} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.228901] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 718.229469] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.229660] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.229998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.230354] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d7c3b1-12a4-43ad-b4a3-a9b82a0b7d17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.236646] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 718.236646] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523a5915-99d0-9dc0-e133-508178951c20" [ 718.236646] env[62914]: _type = "Task" [ 718.236646] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.252309] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523a5915-99d0-9dc0-e133-508178951c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.284418] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 718.345196] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831620, 'name': PowerOffVM_Task, 'duration_secs': 0.23342} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.348089] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 718.348797] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 718.349355] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a70212-bf72-4404-8b06-cc44164b629c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.359185] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 718.359422] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1e7a6bb-9b9c-49ef-af3c-14c445b15402 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.362220] env[62914]: DEBUG nova.network.neutron [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Updating instance_info_cache with network_info: [{"id": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "address": "fa:16:3e:de:7a:42", "network": {"id": "ecf0d16a-6826-48e3-a9ed-8114774cf045", "bridge": "br-int", "label": "tempest-ServersTestJSON-657617940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5214037507114e7eafb35779261bde06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eb60abb-6f", "ovs_interfaceid": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.392955] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Unregistered the VM {{(pid=62914) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 718.392955] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 718.392955] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Deleting the datastore file [datastore2] cead3557-080d-4956-a957-cac449bb69f6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 718.392955] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-313754dc-b888-4415-926c-53b593b58be2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.401366] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 718.401366] env[62914]: value = "task-4831622" [ 718.401366] env[62914]: _type = "Task" [ 718.401366] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.412714] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831622, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.423195] env[62914]: DEBUG oslo_concurrency.lockutils [req-a1bb0608-5999-40d6-b90a-b96b4a9befa2 req-7c5cf16f-ae7a-4c6b-b542-3a7a2be3cceb service nova] Releasing lock "refresh_cache-bf2e9634-66ee-4b6a-a148-bc77420d793f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.639408] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.641034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.309s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.643109] env[62914]: INFO nova.compute.claims [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.753499] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523a5915-99d0-9dc0-e133-508178951c20, 'name': SearchDatastore_Task, 'duration_secs': 0.022663} completed successfully. 
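Annotation: the recurring "Waiting for the task … to complete" / "progress is N%" / "completed successfully" triplets come from polling vCenter task state. Below is a small, library-agnostic re-implementation of that loop (not oslo.vmware's actual wait_for_task); get_task_info is an assumed callable returning a dict with 'state', 'progress' and optionally 'result'/'error'.

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    # Poll task state until it succeeds, fails, or the timeout expires,
    # mirroring the progress lines emitted by _poll_task in the log.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 42}
        state = info['state']
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(poll_interval)
    raise TimeoutError('task did not complete within %.0fs' % timeout)
```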
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.753942] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.754344] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.754690] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.754940] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.755225] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.755601] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2cc2dbc-815c-4c97-85d4-a55a6e8f94c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.772927] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.773138] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 718.773979] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8a4b0df-9e46-4a35-bbe5-54cd2eccbc06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.783206] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 718.783206] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521485f9-0b0d-55f6-8e6f-a84d484a9001" [ 718.783206] env[62914]: _type = "Task" [ 718.783206] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.796985] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521485f9-0b0d-55f6-8e6f-a84d484a9001, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.820642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.866137] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Releasing lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.866778] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Instance network_info: |[{"id": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "address": "fa:16:3e:de:7a:42", "network": {"id": "ecf0d16a-6826-48e3-a9ed-8114774cf045", "bridge": "br-int", "label": "tempest-ServersTestJSON-657617940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5214037507114e7eafb35779261bde06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eb60abb-6f", "ovs_interfaceid": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 718.866913] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:7a:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '81d39ad2-4e62-4f09-a567-88ac5aa70467', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.877356] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Creating folder: Project (5214037507114e7eafb35779261bde06). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 718.877627] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e87aea3-8d3c-432b-8a23-975a42e7dc67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.897182] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Created folder: Project (5214037507114e7eafb35779261bde06) in parent group-v941773. [ 718.897443] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Creating folder: Instances. Parent ref: group-v941888. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 718.897718] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c16acb9-902d-4523-99f4-60e9e8f72f50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.909801] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152216} completed successfully. 
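Annotation: the network_info blob logged for instance 4496a977-… is an ordinary list of VIF dicts, and the "Instance VIF info" line is derived from it. The sketch below pulls out the fields that matter (port id, MAC, fixed IPs, backing switch id) from such a structure; vif_summary is an illustrative helper, not a Nova function.

```python
def vif_summary(network_info):
    # network_info is a list of VIF dicts shaped like the one logged above.
    summary = []
    for vif in network_info:
        net = vif['network']
        fixed_ips = [ip['address']
                     for subnet in net.get('subnets', [])
                     for ip in subnet.get('ips', [])]
        summary.append({
            'port_id': vif['id'],                        # neutron port UUID
            'mac_address': vif['address'],               # e.g. 'fa:16:3e:de:7a:42'
            'bridge': net.get('bridge'),                 # e.g. 'br-int'
            'fixed_ips': fixed_ips,                      # e.g. ['192.168.128.9']
            'backing_id': vif.get('details', {}).get('nsx-logical-switch-id'),
            'vif_type': vif.get('type'),                 # e.g. 'ovs'
        })
    return summary
```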
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.914018] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 718.914018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 718.914018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 718.914288] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Created folder: Instances in parent group-v941888. [ 718.914320] env[62914]: DEBUG oslo.service.loopingcall [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 718.919854] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4496a977-30b2-4323-a561-884633958cdf] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 718.919854] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fa74c36-55b0-4f56-aa7c-cb9e8479b54a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.937147] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.937147] env[62914]: value = "task-4831625" [ 718.937147] env[62914]: _type = "Task" [ 718.937147] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.945897] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831625, 'name': CreateVM_Task} progress is 0%. 
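Annotation: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line indicates the VM-creation call is driven through an oslo.service looping helper. The exact wrapper isn't visible in the log; below is only the common FixedIntervalLoopingCall idiom for waiting on a callable, with check_fn as a placeholder.

```python
from oslo_service import loopingcall

def wait_until_done(check_fn, interval=1.0):
    # Call check_fn every `interval` seconds; once it returns a non-None
    # value, stop the loop and hand that value back to the caller.
    def _poll():
        result = check_fn()
        if result is not None:
            raise loopingcall.LoopingCallDone(retvalue=result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
```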
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.972546] env[62914]: DEBUG nova.network.neutron [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Successfully updated port: 48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.028371] env[62914]: DEBUG nova.compute.manager [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Received event network-changed-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 719.028664] env[62914]: DEBUG nova.compute.manager [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Refreshing instance network info cache due to event network-changed-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 719.028827] env[62914]: DEBUG oslo_concurrency.lockutils [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] Acquiring lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.028964] env[62914]: DEBUG oslo_concurrency.lockutils [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] Acquired lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.029179] env[62914]: DEBUG nova.network.neutron [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Refreshing network info cache for port 8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 719.093860] env[62914]: DEBUG nova.compute.manager [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 719.094462] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5d816d-47b6-432b-bdd9-938604691cd3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.151923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "624ffc8c-2c06-4d0f-9bed-ed792bb4af00" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.152608] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "624ffc8c-2c06-4d0f-9bed-ed792bb4af00" 
acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.300596] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521485f9-0b0d-55f6-8e6f-a84d484a9001, 'name': SearchDatastore_Task, 'duration_secs': 0.03068} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.301588] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfcdb4ba-1c3a-4510-a33b-fa9e02b0b533 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.312030] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 719.312030] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521a6fd6-6e5c-7b85-8ebb-19779dec65e9" [ 719.312030] env[62914]: _type = "Task" [ 719.312030] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.322152] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521a6fd6-6e5c-7b85-8ebb-19779dec65e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.364906] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "7d8287f9-10be-4834-8b7a-1b764145d1c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.365515] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.454520] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831625, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.484074] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-5a704020-921e-4ede-9fd9-b745c027a158" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.484074] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-5a704020-921e-4ede-9fd9-b745c027a158" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.484074] env[62914]: DEBUG nova.network.neutron [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.609877] env[62914]: INFO nova.compute.manager [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] instance snapshotting [ 719.611604] env[62914]: DEBUG nova.objects.instance [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'flavor' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.657216] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "624ffc8c-2c06-4d0f-9bed-ed792bb4af00" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.657216] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 719.687018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.687018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.687218] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.690812] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.690812] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.694589] env[62914]: INFO nova.compute.manager [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Terminating instance [ 719.702597] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d1fabd-6aed-4b23-a598-19ba6531137a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.706168] env[62914]: DEBUG nova.compute.manager [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 719.706168] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 719.707282] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f0cf82-8e38-4d87-87dd-67efb017389a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.721101] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Suspending the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 719.721485] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 719.723644] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d021ae09-7c53-4103-b5b8-ba445bb5c00f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.724871] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2059339-ec2a-4d9b-8806-217d38227452 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.727779] env[62914]: DEBUG nova.compute.manager [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Received event network-changed-df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 719.727979] env[62914]: DEBUG nova.compute.manager [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Refreshing instance network info cache due to event network-changed-df40a274-9dea-4b4c-be39-6e7556e77032. 
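Annotation: the teardown of instance 3eff61b1-… follows the same ordering already completed for cead3557-…: power off, unregister the VM, then delete its datastore directory. A high-level sketch of that ordering with placeholder callables (power_off, unregister, delete_datastore_dir and wait_for_task stand in for the real vmops/ds_util calls):

```python
def destroy_instance(power_off, unregister, delete_datastore_dir, wait_for_task):
    # Order matters: the VM must be powered off before it can be unregistered,
    # and its files are removed only afterwards, matching the
    # PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task sequence
    # in the log.
    wait_for_task(power_off())              # PowerOffVM_Task
    unregister()                            # UnregisterVM (synchronous, not a task)
    wait_for_task(delete_datastore_dir())   # DeleteDatastoreFile_Task
```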
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 719.732035] env[62914]: DEBUG oslo_concurrency.lockutils [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] Acquiring lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.732035] env[62914]: DEBUG oslo_concurrency.lockutils [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] Acquired lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.732035] env[62914]: DEBUG nova.network.neutron [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Refreshing network info cache for port df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 719.743383] env[62914]: DEBUG oslo_vmware.api [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 719.743383] env[62914]: value = "task-4831626" [ 719.743383] env[62914]: _type = "Task" [ 719.743383] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.745157] env[62914]: DEBUG oslo_vmware.api [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 719.745157] env[62914]: value = "task-4831627" [ 719.745157] env[62914]: _type = "Task" [ 719.745157] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.774121] env[62914]: DEBUG oslo_vmware.api [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.774623] env[62914]: DEBUG oslo_vmware.api [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831627, 'name': SuspendVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.831354] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521a6fd6-6e5c-7b85-8ebb-19779dec65e9, 'name': SearchDatastore_Task, 'duration_secs': 0.024555} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.835891] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.836791] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] bf2e9634-66ee-4b6a-a148-bc77420d793f/bf2e9634-66ee-4b6a-a148-bc77420d793f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 719.837713] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95277803-4d2b-4d25-bbfd-dc26f8921a90 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.849759] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 719.849759] env[62914]: value = "task-4831628" [ 719.849759] env[62914]: _type = "Task" [ 719.849759] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.869079] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.936957] env[62914]: DEBUG nova.network.neutron [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Updated VIF entry in instance network info cache for port 8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46. 
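Annotation: the CopyVirtualDisk_Task above copies the cached image VMDK into the instance's own folder on the same datastore. The source and target strings follow the "[datastore] folder/file.vmdk" convention; a tiny illustrative helper for building them (ds_path is not a Nova function, the literal values are taken from the log lines):

```python
def ds_path(datastore, *parts):
    # Build a '[datastore1] devstack-image-cache_base/<id>/<id>.vmdk'-style path.
    return '[%s] %s' % (datastore, '/'.join(parts))

image_id = '75c43660-b52b-450e-ba36-0f721e14bc6c'
instance_uuid = 'bf2e9634-66ee-4b6a-a148-bc77420d793f'
src = ds_path('datastore1', 'devstack-image-cache_base', image_id, image_id + '.vmdk')
dst = ds_path('datastore1', instance_uuid, instance_uuid + '.vmdk')
```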
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 719.937927] env[62914]: DEBUG nova.network.neutron [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Updating instance_info_cache with network_info: [{"id": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "address": "fa:16:3e:de:7a:42", "network": {"id": "ecf0d16a-6826-48e3-a9ed-8114774cf045", "bridge": "br-int", "label": "tempest-ServersTestJSON-657617940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5214037507114e7eafb35779261bde06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eb60abb-6f", "ovs_interfaceid": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.956020] env[62914]: DEBUG oslo_concurrency.lockutils [req-58d39994-a36f-47e2-8ed0-303f0f3d0f09 req-07ad7a94-9d36-4994-ab86-de2f4098ae79 service nova] Releasing lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.971178] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831625, 'name': CreateVM_Task, 'duration_secs': 0.66353} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.974244] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4496a977-30b2-4323-a561-884633958cdf] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 719.981014] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 719.981730] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 719.982346] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.982958] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 719.987054] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.987828] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 719.987957] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 719.988209] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 719.988554] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 719.988880] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 719.989232] env[62914]: DEBUG nova.virt.hardware [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 719.990949] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.991018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.991493] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 719.996526] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6665d0e-f971-4560-b3da-12fe0e1bae5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.000553] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfcd37e5-8592-45ac-9608-ec2b067b21d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.007433] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 720.007433] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5263918d-9def-d02d-0b33-8ccfa72cbf33" [ 720.007433] env[62914]: _type = "Task" [ 720.007433] env[62914]: } to complete. 
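Annotation: the "Build topologies for 1 vcpu(s) 1:1:1 … Got 1 possible topologies" lines enumerate which (sockets, cores, threads) combinations multiply out to the flavor's vCPU count within the 65536-per-dimension limits. A simplified stand-alone enumeration (not Nova's actual _get_possible_cpu_topologies, which also weighs flavor/image preferences) behaves the same way for this flavor:

```python
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals vcpus,
    # honouring the per-dimension maxima; for vcpus=1 this yields [(1, 1, 1)],
    # matching the single topology reported in the log.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

assert possible_cpu_topologies(1) == [(1, 1, 1)]
```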
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.015399] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1463ee08-2d28-4a6d-8dbb-d17aa5bcf9ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.028898] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5263918d-9def-d02d-0b33-8ccfa72cbf33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.037748] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 720.043684] env[62914]: DEBUG oslo.service.loopingcall [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.046995] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 720.047972] env[62914]: DEBUG nova.network.neutron [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.050140] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-363db96d-be79-487e-899e-1e37d09f50a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.075786] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 720.075786] env[62914]: value = "task-4831629" [ 720.075786] env[62914]: _type = "Task" [ 720.075786] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.086967] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831629, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.119682] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b35a9e1-23e6-4c69-90ef-f785a3bff98a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.151457] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b1ca9c-4f51-40fd-837a-4b044287a997 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.165810] env[62914]: DEBUG nova.compute.utils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.171204] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 720.172240] env[62914]: DEBUG nova.network.neutron [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 720.224266] env[62914]: DEBUG nova.policy [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5a274ff69c54b8b9e7dcbd453c2b41f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92e65b92475a4d0ab57070424a4eebff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.270178] env[62914]: DEBUG oslo_vmware.api [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831626, 'name': PowerOffVM_Task, 'duration_secs': 0.332404} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.273434] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 720.273639] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 720.274280] env[62914]: DEBUG oslo_vmware.api [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831627, 'name': SuspendVM_Task} progress is 54%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.277036] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11a2b734-78fb-43b7-ba6c-b5a884e5c307 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.344101] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75841256-6c90-4238-b7d7-83495fc0087b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.358489] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5533ba4-503a-440d-9367-5eea24ae3cb1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.369570] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 720.369817] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 720.370033] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Deleting the datastore file [datastore2] 3eff61b1-b09c-4a04-821c-cefdc7be3f64 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.370997] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0842378f-23a5-4de2-8d1c-da13b82ef3fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.376652] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 
tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.403813] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf67e2cc-16aa-4164-9782-965ed5a9242e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.409687] env[62914]: DEBUG oslo_vmware.api [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for the task: (returnval){ [ 720.409687] env[62914]: value = "task-4831631" [ 720.409687] env[62914]: _type = "Task" [ 720.409687] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.416183] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7a04b2-1639-468b-a8e7-b4584d478532 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.426151] env[62914]: DEBUG oslo_vmware.api [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.436020] env[62914]: DEBUG nova.compute.provider_tree [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.481981] env[62914]: DEBUG nova.network.neutron [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Updating instance_info_cache with network_info: [{"id": "48f699bf-5203-47d2-88d5-9747169234ea", "address": "fa:16:3e:a1:fe:a4", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48f699bf-52", "ovs_interfaceid": "48f699bf-5203-47d2-88d5-9747169234ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 720.535062] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5263918d-9def-d02d-0b33-8ccfa72cbf33, 'name': SearchDatastore_Task, 'duration_secs': 0.057672} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.535504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.535813] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.536103] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.538016] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.538016] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.538016] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe5fd3c8-c774-4b76-86fd-489eaaa3c14e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.561540] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.561859] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 720.562813] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5344f66-da40-4a61-9dc7-510fee7a4475 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.570827] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 720.570827] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525a4b51-96db-8347-e16e-b5e0e0091562" [ 720.570827] env[62914]: _type = "Task" [ 720.570827] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.584455] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525a4b51-96db-8347-e16e-b5e0e0091562, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.598091] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831629, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.665433] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 720.669030] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fbf9bbee-c583-4fb5-af47-a5509a22c0ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.675859] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 720.686398] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 720.686398] env[62914]: value = "task-4831632" [ 720.686398] env[62914]: _type = "Task" [ 720.686398] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.711940] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831632, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.713564] env[62914]: DEBUG nova.network.neutron [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Updated VIF entry in instance network info cache for port df40a274-9dea-4b4c-be39-6e7556e77032. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 720.714098] env[62914]: DEBUG nova.network.neutron [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Updating instance_info_cache with network_info: [{"id": "df40a274-9dea-4b4c-be39-6e7556e77032", "address": "fa:16:3e:b0:58:25", "network": {"id": "f81a07e3-8a2d-4ae8-afdd-0fcf0588d2d0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-312614933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a787e2eca26741aa951dc2ef5c507766", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf40a274-9d", "ovs_interfaceid": "df40a274-9dea-4b4c-be39-6e7556e77032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.790566] env[62914]: DEBUG oslo_vmware.api [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831627, 'name': SuspendVM_Task, 'duration_secs': 0.995581} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.791694] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Suspended the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 720.791781] env[62914]: DEBUG nova.compute.manager [None req-231b7624-ad4b-4436-a747-e66532532320 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 720.792649] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee98ff03-a728-4976-948a-bca748cceb27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.845246] env[62914]: DEBUG nova.network.neutron [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Successfully created port: 6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.862624] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831628, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.924903] env[62914]: DEBUG oslo_vmware.api [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Task: {'id': task-4831631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.405874} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.925412] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 720.925556] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 720.925806] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 720.926250] env[62914]: INFO nova.compute.manager [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Took 1.22 seconds to destroy the instance on the hypervisor. [ 720.926544] env[62914]: DEBUG oslo.service.loopingcall [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.926775] env[62914]: DEBUG nova.compute.manager [-] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 720.926871] env[62914]: DEBUG nova.network.neutron [-] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 720.943623] env[62914]: DEBUG nova.scheduler.client.report [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.987329] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-5a704020-921e-4ede-9fd9-b745c027a158" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.987602] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance network_info: |[{"id": "48f699bf-5203-47d2-88d5-9747169234ea", "address": "fa:16:3e:a1:fe:a4", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48f699bf-52", "ovs_interfaceid": "48f699bf-5203-47d2-88d5-9747169234ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 720.988864] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:fe:a4', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48f699bf-5203-47d2-88d5-9747169234ea', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 720.998224] env[62914]: DEBUG oslo.service.loopingcall [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.998900] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 720.999179] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da050654-b510-4560-a00b-6a7dd5317eb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.030848] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 721.030848] env[62914]: value = "task-4831633" [ 721.030848] env[62914]: _type = "Task" [ 721.030848] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.045442] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831633, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.084619] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525a4b51-96db-8347-e16e-b5e0e0091562, 'name': SearchDatastore_Task, 'duration_secs': 0.063685} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.086115] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13e2d472-6567-42e1-9c98-4a0121e25740 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.092441] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831629, 'name': CreateVM_Task, 'duration_secs': 0.609576} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.094746] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 721.094746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.094746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.094746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 721.094746] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99fc7555-08b3-41c2-981f-b9e3c9a0207f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.097896] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 721.097896] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523511e0-4490-f8df-e420-d21e11703487" [ 721.097896] env[62914]: _type = "Task" [ 721.097896] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.103708] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 721.103708] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5267e328-bf5d-00b1-2a8f-1355bf185ab7" [ 721.103708] env[62914]: _type = "Task" [ 721.103708] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.110514] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523511e0-4490-f8df-e420-d21e11703487, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.115538] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5267e328-bf5d-00b1-2a8f-1355bf185ab7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.207501] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831632, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.221566] env[62914]: DEBUG oslo_concurrency.lockutils [req-0b0f12c7-356e-43fd-974d-87d6d46e5ea4 req-463ef1d2-122d-4386-8d25-3235cfbb573f service nova] Releasing lock "refresh_cache-fed831e0-4518-4025-89b1-7f6b644e013d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.361155] env[62914]: DEBUG nova.compute.manager [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Received event network-vif-plugged-48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 721.361460] env[62914]: DEBUG oslo_concurrency.lockutils [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] Acquiring lock "5a704020-921e-4ede-9fd9-b745c027a158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.361576] env[62914]: DEBUG oslo_concurrency.lockutils [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] Lock "5a704020-921e-4ede-9fd9-b745c027a158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.361751] env[62914]: DEBUG oslo_concurrency.lockutils [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] Lock "5a704020-921e-4ede-9fd9-b745c027a158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.361925] env[62914]: DEBUG nova.compute.manager [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] No waiting events found dispatching network-vif-plugged-48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 721.362108] env[62914]: WARNING nova.compute.manager [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Received unexpected event network-vif-plugged-48f699bf-5203-47d2-88d5-9747169234ea for instance with vm_state building and task_state spawning. 
[ 721.362271] env[62914]: DEBUG nova.compute.manager [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Received event network-changed-48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 721.362426] env[62914]: DEBUG nova.compute.manager [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Refreshing instance network info cache due to event network-changed-48f699bf-5203-47d2-88d5-9747169234ea. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 721.362989] env[62914]: DEBUG oslo_concurrency.lockutils [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] Acquiring lock "refresh_cache-5a704020-921e-4ede-9fd9-b745c027a158" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.363185] env[62914]: DEBUG oslo_concurrency.lockutils [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] Acquired lock "refresh_cache-5a704020-921e-4ede-9fd9-b745c027a158" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.363356] env[62914]: DEBUG nova.network.neutron [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Refreshing network info cache for port 48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 721.372747] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831628, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.451192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.451192] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 721.451838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.973s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.455023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.455023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.839s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.455023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.457276] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.869s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.457503] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.459661] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.319s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.459912] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
721.461907] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.069s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.462186] env[62914]: DEBUG nova.objects.instance [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'resources' on Instance uuid 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.516189] env[62914]: INFO nova.scheduler.client.report [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Deleted allocations for instance 934a0ca3-d879-4b23-90fe-2c190c201a88 [ 721.520285] env[62914]: INFO nova.scheduler.client.report [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Deleted allocations for instance a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494 [ 721.538228] env[62914]: INFO nova.scheduler.client.report [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Deleted allocations for instance ff2cff97-1671-4f97-8f69-532253169ff8 [ 721.540530] env[62914]: INFO nova.scheduler.client.report [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Deleted allocations for instance 4fbb08f0-6712-4e78-b9da-b33a812ec9b7 [ 721.550666] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831633, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.618078] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5267e328-bf5d-00b1-2a8f-1355bf185ab7, 'name': SearchDatastore_Task, 'duration_secs': 0.039304} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.624579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.624864] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 721.625095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.625356] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523511e0-4490-f8df-e420-d21e11703487, 'name': SearchDatastore_Task, 'duration_secs': 0.041607} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.625924] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.626324] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 4496a977-30b2-4323-a561-884633958cdf/4496a977-30b2-4323-a561-884633958cdf.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 721.626620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.626850] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 721.627085] env[62914]: DEBUG oslo_vmware.service 
[-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef1fd577-58c7-4cd5-9aab-03cb2c2be566 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.629931] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e29c7b05-5df4-4980-8577-ab5968f3bc78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.642181] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 721.642181] env[62914]: value = "task-4831634" [ 721.642181] env[62914]: _type = "Task" [ 721.642181] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.643215] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 721.643393] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 721.648652] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d221d97-a2f8-47e9-aa7f-4c9aef09d3f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.659062] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.659734] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 721.659734] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5263ebc6-0d7d-6610-a3f6-4e6620a88e7b" [ 721.659734] env[62914]: _type = "Task" [ 721.659734] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.671811] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5263ebc6-0d7d-6610-a3f6-4e6620a88e7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.706021] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 721.713967] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831632, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.744961] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.745222] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.745380] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.745567] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.745784] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.745952] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.746184] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.746343] env[62914]: DEBUG nova.virt.hardware [None 
req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 721.746509] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.746670] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.746840] env[62914]: DEBUG nova.virt.hardware [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.747765] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5558e57f-7e53-4143-a5da-e7b5668bd8cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.756493] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7e7b1a-c8d5-4859-abd3-2904b76878a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.853464] env[62914]: DEBUG nova.network.neutron [-] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.865539] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831628, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.969139] env[62914]: DEBUG nova.compute.utils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.975911] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 721.976233] env[62914]: DEBUG nova.network.neutron [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 722.033673] env[62914]: DEBUG oslo_concurrency.lockutils [None req-55d25424-7aca-4b14-b096-35ac352d99dc tempest-AttachInterfacesV270Test-2074000085 tempest-AttachInterfacesV270Test-2074000085-project-member] Lock "934a0ca3-d879-4b23-90fe-2c190c201a88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.745s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.035759] env[62914]: DEBUG oslo_concurrency.lockutils [None req-46b716ed-2097-42ed-8db8-066ff85c5c02 tempest-VolumesAssistedSnapshotsTest-1541736566 tempest-VolumesAssistedSnapshotsTest-1541736566-project-member] Lock "a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.381s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.050425] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831633, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.060278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f2bdc31-7520-488e-8833-fbfc1eabea22 tempest-ServerTagsTestJSON-331705066 tempest-ServerTagsTestJSON-331705066-project-member] Lock "ff2cff97-1671-4f97-8f69-532253169ff8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.610s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.066193] env[62914]: DEBUG oslo_concurrency.lockutils [None req-53bfaca2-4902-4486-b436-3bab427ff788 tempest-ServerAddressesNegativeTestJSON-977920271 tempest-ServerAddressesNegativeTestJSON-977920271-project-member] Lock "4fbb08f0-6712-4e78-b9da-b33a812ec9b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.708s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.071972] env[62914]: DEBUG nova.policy [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ddee8b8fb0e4ffea8e02b2b9f7a9846', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df7ae349aea0487d88689eb09933eb1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 722.129126] env[62914]: DEBUG nova.compute.manager [req-4c91d452-034b-4847-946f-0012539ac2b7 req-a018c0e4-244d-4728-b4f5-6a1711c27978 service nova] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Received event network-vif-deleted-59265cbb-d823-43dc-a07d-d850de95a7d8 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 722.156735] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831634, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.170893] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5263ebc6-0d7d-6610-a3f6-4e6620a88e7b, 'name': SearchDatastore_Task, 'duration_secs': 0.019506} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.175169] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f376087-5440-446c-9552-caba950c62b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.184291] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 722.184291] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529e8ff2-49e7-e138-3f04-2d5ad89fa842" [ 722.184291] env[62914]: _type = "Task" [ 722.184291] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.202700] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529e8ff2-49e7-e138-3f04-2d5ad89fa842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.217787] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831632, 'name': CreateSnapshot_Task, 'duration_secs': 1.374268} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.220899] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 722.222505] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164be702-81de-4183-a433-dc6d5e647c63 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.361316] env[62914]: INFO nova.compute.manager [-] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Took 1.43 seconds to deallocate network for instance. 
[ 722.374318] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831628, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.110857} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.374647] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] bf2e9634-66ee-4b6a-a148-bc77420d793f/bf2e9634-66ee-4b6a-a148-bc77420d793f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 722.374897] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.375203] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-912ec893-0b70-45bd-b08a-fb844725d4d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.384162] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 722.384162] env[62914]: value = "task-4831635" [ 722.384162] env[62914]: _type = "Task" [ 722.384162] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.395560] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831635, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.478526] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 722.531149] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c26546-6d64-4084-9cd6-afc954a40078 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.545483] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e44cc4-cb68-405f-b6b6-e7e17427ab2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.554227] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831633, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.586815] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460f833b-014e-4d9a-8229-99417593c4fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.597032] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bdc67d-2f79-4fad-87ec-914f74e83d6d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.618765] env[62914]: DEBUG nova.compute.provider_tree [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.659018] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831634, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.698192] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529e8ff2-49e7-e138-3f04-2d5ad89fa842, 'name': SearchDatastore_Task, 'duration_secs': 0.016755} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.698780] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.698780] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 722.699086] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a3afad3-b25c-47fb-983c-7c2e4ecf67a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.715293] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 722.715293] env[62914]: value = "task-4831636" [ 722.715293] env[62914]: _type = "Task" [ 722.715293] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.722080] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831636, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.747855] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 722.749170] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-87d57269-99c1-4e52-ba41-2b8fcc106a7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.758395] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 722.758395] env[62914]: value = "task-4831637" [ 722.758395] env[62914]: _type = "Task" [ 722.758395] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.770168] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831637, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.789521] env[62914]: DEBUG nova.network.neutron [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Successfully created port: f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.870175] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.896795] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171506} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.897260] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.899223] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc76e44c-a09f-4fa7-9b7e-77c4c83116ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.924461] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] bf2e9634-66ee-4b6a-a148-bc77420d793f/bf2e9634-66ee-4b6a-a148-bc77420d793f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.927409] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc0acde7-a9ff-43d2-a964-189abe588213 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.944241] env[62914]: DEBUG nova.network.neutron [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Updated VIF entry in instance network info cache for port 48f699bf-5203-47d2-88d5-9747169234ea. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 722.944655] env[62914]: DEBUG nova.network.neutron [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Updating instance_info_cache with network_info: [{"id": "48f699bf-5203-47d2-88d5-9747169234ea", "address": "fa:16:3e:a1:fe:a4", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48f699bf-52", "ovs_interfaceid": "48f699bf-5203-47d2-88d5-9747169234ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.954968] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 722.954968] env[62914]: value = "task-4831638" [ 722.954968] env[62914]: _type = "Task" [ 722.954968] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.970101] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831638, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.049170] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831633, 'name': CreateVM_Task, 'duration_secs': 1.758503} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.049892] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 723.050733] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.050922] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.051393] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 723.051680] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e7536d3-9d8c-4bbc-8463-8e857ba5579e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.061698] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 723.061698] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c652da-eed4-52a9-e675-0ae6ee62000f" [ 723.061698] env[62914]: _type = "Task" [ 723.061698] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.072778] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c652da-eed4-52a9-e675-0ae6ee62000f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.125050] env[62914]: DEBUG nova.scheduler.client.report [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.141605] env[62914]: DEBUG nova.network.neutron [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Successfully updated port: 6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 723.159396] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831634, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.02116} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.159813] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 4496a977-30b2-4323-a561-884633958cdf/4496a977-30b2-4323-a561-884633958cdf.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 723.160046] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 723.160333] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fae54e27-e6a1-4fef-89b3-35984471ecf9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.172161] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 723.172161] env[62914]: value = "task-4831639" [ 723.172161] env[62914]: _type = "Task" [ 723.172161] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.191376] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831639, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.225821] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831636, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.272602] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831637, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.448286] env[62914]: DEBUG nova.compute.manager [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 723.448286] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cdc70a-2741-419f-b8e9-05ce5ebecd96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.452453] env[62914]: DEBUG oslo_concurrency.lockutils [req-fd2b5ca9-b5ff-4706-842f-93ad18f91c86 req-0f7048e7-0cdf-4889-b720-21f2de8074b7 service nova] Releasing lock "refresh_cache-5a704020-921e-4ede-9fd9-b745c027a158" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.479146] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.494336] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 723.529578] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 723.529933] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 723.530217] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.530539] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 723.530899] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.531460] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 723.531538] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 723.531818] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 723.536107] env[62914]: DEBUG nova.virt.hardware [None 
req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 723.536538] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 723.536917] env[62914]: DEBUG nova.virt.hardware [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 723.538113] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f56e8e-682b-42f7-98bf-801b1397c7ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.550546] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a0ccd6-8964-4665-834a-af7365e39375 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.581808] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c652da-eed4-52a9-e675-0ae6ee62000f, 'name': SearchDatastore_Task, 'duration_secs': 0.019508} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.582169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.582420] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.582668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.582822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.583014] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.583325] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c639183-c7a5-487c-a2ea-624377a3b37b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.593642] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.593949] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 723.595007] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df2640a4-e65f-4937-9289-374638bb2fef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.604016] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 723.604016] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5238d65d-94c5-e4e5-117b-6479a5014f44" [ 723.604016] env[62914]: _type = "Task" [ 723.604016] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.609712] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5238d65d-94c5-e4e5-117b-6479a5014f44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.634938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.637704] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.758s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.639536] env[62914]: INFO nova.compute.claims [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.645172] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "refresh_cache-1342d15d-fbef-4709-adf6-f827bc13d3ca" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.645172] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquired lock "refresh_cache-1342d15d-fbef-4709-adf6-f827bc13d3ca" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.645172] env[62914]: DEBUG nova.network.neutron [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Building network info cache for instance {{(pid=62914) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 723.685906] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831639, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14101} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.686240] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 723.687126] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516325ba-29da-4cc1-80b6-f56e93527fec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.692405] env[62914]: INFO nova.scheduler.client.report [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted allocations for instance 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 [ 723.722055] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 4496a977-30b2-4323-a561-884633958cdf/4496a977-30b2-4323-a561-884633958cdf.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 723.722055] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50e37496-ad01-4298-af0a-5c185d935257 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.748396] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831636, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.814101} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.752656] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 723.752947] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 723.753865] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 723.753865] env[62914]: value = "task-4831640" [ 723.753865] env[62914]: _type = "Task" [ 723.753865] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.753865] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-593f20b2-00cc-4532-8d4e-e290343bc75b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.768823] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 723.768823] env[62914]: value = "task-4831641" [ 723.768823] env[62914]: _type = "Task" [ 723.768823] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.774998] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.782411] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831637, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.790892] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831641, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.976022] env[62914]: INFO nova.compute.manager [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] instance snapshotting [ 723.976022] env[62914]: WARNING nova.compute.manager [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 723.976022] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831638, 'name': ReconfigVM_Task, 'duration_secs': 0.878109} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.976022] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Reconfigured VM instance instance-00000024 to attach disk [datastore1] bf2e9634-66ee-4b6a-a148-bc77420d793f/bf2e9634-66ee-4b6a-a148-bc77420d793f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.976844] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04f20b7-0f95-41e6-8032-eb35e9c67329 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.979806] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce716de3-396d-425f-baa7-5e812d6fbca2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.006927] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4932e5-064d-41bd-9a54-c52bfee2fc82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.010010] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 724.010010] env[62914]: value = "task-4831642" [ 724.010010] env[62914]: _type = "Task" [ 724.010010] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.025887] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831642, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.112641] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5238d65d-94c5-e4e5-117b-6479a5014f44, 'name': SearchDatastore_Task, 'duration_secs': 0.019151} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.113559] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdc8c878-22d9-4faf-a57a-1a64ffd95fc8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.120257] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 724.120257] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259862f-1aab-8488-b511-3b9f2bf76fad" [ 724.120257] env[62914]: _type = "Task" [ 724.120257] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.130267] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259862f-1aab-8488-b511-3b9f2bf76fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.226869] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bf262e5d-a35d-4ab5-a1db-1170d50ce125 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.818s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.228148] env[62914]: DEBUG oslo_concurrency.lockutils [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Acquired lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.231616] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9387d8-c7df-43e3-a569-4fd285b663db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.243605] env[62914]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
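Editor's note: the warning just above (a SOAP fault returned with HTTP status 200) and the fault list and traceback a few records later show a RetrievePropertiesEx call failing with ManagedObjectNotFound because vm-941820 had already been deleted. A minimal sketch, assuming an existing session, of how such a lookup can be guarded so a racing delete becomes an "instance gone" result instead of an unhandled fault; the function name, vm_ref variable, and the choice of property are illustrative:

    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    def get_vm_power_state(session, vm_ref):
        """Read one VM property, treating a concurrently deleted VM as gone."""
        try:
            # Same helper the traceback below goes through:
            # vim_util.get_object_property -> RetrievePropertiesEx.
            return session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, vm_ref, 'runtime.powerState')
        except vexc.ManagedObjectNotFoundException:
            # The VM vanished between lookup and property retrieval
            # (the ManagedObjectNotFound fault reported above).
            return None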
[ 724.243949] env[62914]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62914) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 724.244479] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a15871ee-38f4-491a-929f-f9acc1333877 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.258242] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516793e6-bd1d-4899-957c-b34b088cdfb6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.286597] env[62914]: DEBUG nova.compute.manager [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Received event network-vif-plugged-6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 724.286597] env[62914]: DEBUG oslo_concurrency.lockutils [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] Acquiring lock "1342d15d-fbef-4709-adf6-f827bc13d3ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.287501] env[62914]: DEBUG oslo_concurrency.lockutils [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.287501] env[62914]: DEBUG oslo_concurrency.lockutils [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.287501] env[62914]: DEBUG nova.compute.manager [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] No waiting events found dispatching network-vif-plugged-6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 724.287732] env[62914]: WARNING nova.compute.manager [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Received unexpected event network-vif-plugged-6fb0a3a2-3560-49e6-85bc-32473f0b2985 for instance with vm_state building and task_state spawning. 
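Editor's note: the surrounding records show oslo_concurrency.lockutils acquiring and releasing named locks (the "compute_resources" lock, the per-instance "<uuid>-events" lock) and logging how long each was waited on and held. A minimal sketch of the two usual ways that pattern is produced; the lock names and function bodies are illustrative only:

    from oslo_concurrency import lockutils

    # Decorator form: emits the 'acquired by ... waited Ns' / '"released" ... held Ns'
    # debug lines around the whole function, as in the compute_resources records.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        ...  # critical section

    # Context-manager form for a dynamically built lock name, like the
    # per-instance "<uuid>-events" lock in the event-dispatch records above.
    def pop_event(instance_uuid):
        with lockutils.lock('%s-events' % instance_uuid):
            ...  # pop the pending event while holding the lock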
[ 724.287865] env[62914]: DEBUG nova.compute.manager [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Received event network-changed-6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 724.288077] env[62914]: DEBUG nova.compute.manager [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Refreshing instance network info cache due to event network-changed-6fb0a3a2-3560-49e6-85bc-32473f0b2985. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 724.288223] env[62914]: DEBUG oslo_concurrency.lockutils [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] Acquiring lock "refresh_cache-1342d15d-fbef-4709-adf6-f827bc13d3ca" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.323146] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831637, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.323801] env[62914]: ERROR root [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-941820' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-941820' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-941820' has already 
been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-941820'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-941820' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-941820' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-941820'}\n"]: nova.exception.InstanceNotFound: Instance 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 could not be found. [ 724.324166] env[62914]: DEBUG oslo_concurrency.lockutils [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] Releasing lock "6fd5f3b8-1175-4bd5-b0b4-12517ba65271" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.324501] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Detach interface failed, port_id=ec98f693-b488-485c-8165-c736ecc6b3d7, reason: Instance 6fd5f3b8-1175-4bd5-b0b4-12517ba65271 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 724.324630] env[62914]: DEBUG nova.compute.manager [req-e22fa865-4ef4-4777-abaa-59420d89f4a8 req-82bedb29-1cec-49c5-a42d-56a7ea4f9739 service nova] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Received event network-vif-deleted-a37b3d57-45a7-4167-970b-4734a54661f8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 724.325353] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.331367] env[62914]: DEBUG nova.network.neutron [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.334126] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831641, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.287352} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.334503] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.335825] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5de317-7512-4a0f-97fc-ce08edac5b97 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.361120] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.361756] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0a11447-0aab-4a58-8cfa-53bfd56cdc32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.385442] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 724.385442] env[62914]: value = "task-4831643" [ 724.385442] env[62914]: _type = "Task" [ 724.385442] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.398117] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.524307] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 724.524307] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831642, 'name': Rename_Task, 'duration_secs': 0.288548} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.524307] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-afb46197-bc3b-4874-ae2d-b1ad21d64bc9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.526420] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 724.526714] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d311467-f970-4ad6-957b-3ceea7e91942 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.537026] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 724.537026] env[62914]: value = "task-4831644" [ 724.537026] env[62914]: _type = "Task" [ 724.537026] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.537669] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 724.537669] env[62914]: value = "task-4831645" [ 724.537669] env[62914]: _type = "Task" [ 724.537669] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.550842] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.554666] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831644, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.633160] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259862f-1aab-8488-b511-3b9f2bf76fad, 'name': SearchDatastore_Task, 'duration_secs': 0.019996} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.633842] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.634164] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 724.634488] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c3fc722-d307-4ed0-9284-db3c51036797 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.643498] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 724.643498] env[62914]: value = "task-4831646" [ 724.643498] env[62914]: _type = "Task" [ 724.643498] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.656227] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.807494] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831637, 'name': CloneVM_Task, 'duration_secs': 1.851053} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.807494] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831640, 'name': ReconfigVM_Task, 'duration_secs': 0.652658} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.809033] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Created linked-clone VM from snapshot [ 724.809033] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 4496a977-30b2-4323-a561-884633958cdf/4496a977-30b2-4323-a561-884633958cdf.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 724.810437] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efd1009-924d-4801-baae-419c572469aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.816319] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-868ca035-dab4-444f-8caf-c68b682c5918 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.832106] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Uploading image 43e1e672-0912-4810-8ce8-f395286e9ba9 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 724.837147] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 724.837147] env[62914]: value = "task-4831647" [ 724.837147] env[62914]: _type = "Task" [ 724.837147] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.850136] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831647, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.869582] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 724.869582] env[62914]: value = "vm-941894" [ 724.869582] env[62914]: _type = "VirtualMachine" [ 724.869582] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 724.869582] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-556c733f-d3b8-479f-b255-0ba32446fcd9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.877940] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease: (returnval){ [ 724.877940] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bf4fa-d5b4-9413-d993-dc9922cfba8e" [ 724.877940] env[62914]: _type = "HttpNfcLease" [ 724.877940] env[62914]: } obtained for exporting VM: (result){ [ 724.877940] env[62914]: value = "vm-941894" [ 724.877940] env[62914]: _type = "VirtualMachine" [ 724.877940] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 724.878710] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the lease: (returnval){ [ 724.878710] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bf4fa-d5b4-9413-d993-dc9922cfba8e" [ 724.878710] env[62914]: _type = "HttpNfcLease" [ 724.878710] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 724.889031] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 724.889031] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bf4fa-d5b4-9413-d993-dc9922cfba8e" [ 724.889031] env[62914]: _type = "HttpNfcLease" [ 724.889031] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 724.904984] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831643, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.000972] env[62914]: DEBUG nova.network.neutron [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Updating instance_info_cache with network_info: [{"id": "6fb0a3a2-3560-49e6-85bc-32473f0b2985", "address": "fa:16:3e:45:7a:af", "network": {"id": "12d5462a-a891-4c1e-9a7c-2c1ded5fd046", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1701586483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e65b92475a4d0ab57070424a4eebff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb0a3a2-35", "ovs_interfaceid": "6fb0a3a2-3560-49e6-85bc-32473f0b2985", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.053636] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831644, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.061215] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831645, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.126019] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de2fa4b-595c-42fc-8321-c76b05b18afe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.134548] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9ed9b8-5713-46ca-9d62-8ac14e0ce52b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.175728] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb341da-d01e-4d19-bc83-3f737f2332b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.193765] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831646, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.193765] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a20c4b-36d1-486c-b67b-9c80ed044951 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.212039] env[62914]: DEBUG nova.compute.provider_tree [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.355368] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831647, 'name': Rename_Task, 'duration_secs': 0.346026} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.355844] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 725.356860] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-061166bd-f241-4fb4-bfdd-44867c2dca12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.367338] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 725.367338] env[62914]: value = "task-4831649" [ 725.367338] env[62914]: _type = "Task" [ 725.367338] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.379613] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831649, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.389380] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 725.389380] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bf4fa-d5b4-9413-d993-dc9922cfba8e" [ 725.389380] env[62914]: _type = "HttpNfcLease" [ 725.389380] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 725.394273] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 725.394273] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bf4fa-d5b4-9413-d993-dc9922cfba8e" [ 725.394273] env[62914]: _type = "HttpNfcLease" [ 725.394273] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 725.395116] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a9ab84-b3b6-4bd9-bab1-19a7e1fe879a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.404097] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fb26f-37af-7039-9303-6531b9839012/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 725.404488] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fb26f-37af-7039-9303-6531b9839012/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 725.409285] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831643, 'name': ReconfigVM_Task, 'duration_secs': 0.590966} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.410611] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Reconfigured VM instance instance-00000023 to attach disk [datastore1] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.468154] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-317cfc7d-b345-42df-9e31-75057ddc81b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.476834] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 725.476834] env[62914]: value = "task-4831650" [ 725.476834] env[62914]: _type = "Task" [ 725.476834] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.489103] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831650, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.503700] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Releasing lock "refresh_cache-1342d15d-fbef-4709-adf6-f827bc13d3ca" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.504171] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Instance network_info: |[{"id": "6fb0a3a2-3560-49e6-85bc-32473f0b2985", "address": "fa:16:3e:45:7a:af", "network": {"id": "12d5462a-a891-4c1e-9a7c-2c1ded5fd046", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1701586483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e65b92475a4d0ab57070424a4eebff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb0a3a2-35", "ovs_interfaceid": "6fb0a3a2-3560-49e6-85bc-32473f0b2985", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 725.504518] env[62914]: DEBUG oslo_concurrency.lockutils [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] Acquired lock "refresh_cache-1342d15d-fbef-4709-adf6-f827bc13d3ca" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.504860] env[62914]: DEBUG nova.network.neutron [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Refreshing network info cache for port 6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 725.506687] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:7a:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fb0a3a2-3560-49e6-85bc-32473f0b2985', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.521858] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Creating folder: 
Project (92e65b92475a4d0ab57070424a4eebff). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.528562] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a2b9d17-ecc4-4a62-8635-ea34811c7d14 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.530734] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6419d71f-a49f-4184-a78c-84862b56fd39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.545134] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Created folder: Project (92e65b92475a4d0ab57070424a4eebff) in parent group-v941773. [ 725.545134] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Creating folder: Instances. Parent ref: group-v941896. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.549569] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69ca5272-8611-4de3-9e53-aadc18a89a9b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.553999] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831644, 'name': CreateSnapshot_Task, 'duration_secs': 0.984478} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.555843] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 725.556346] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284e3871-2e1b-4aa4-ad6a-f7f6ffba001a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.562522] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831645, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.565709] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Created folder: Instances in parent group-v941896. [ 725.565831] env[62914]: DEBUG oslo.service.loopingcall [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.566144] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 725.569709] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41efb361-44fe-4cee-af27-997f3a2bff11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.597546] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.597546] env[62914]: value = "task-4831653" [ 725.597546] env[62914]: _type = "Task" [ 725.597546] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.607972] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831653, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.694043] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831646, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.819454} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.698026] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 725.698026] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.698026] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c41674a7-c6bc-4abf-b201-b5ddd15818c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.709021] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 725.709021] env[62914]: value = "task-4831654" [ 725.709021] env[62914]: _type = "Task" [ 725.709021] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.716751] env[62914]: DEBUG nova.scheduler.client.report [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.722726] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.780248] env[62914]: DEBUG nova.network.neutron [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Successfully updated port: f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.882113] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831649, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.989602] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831650, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.055270] env[62914]: DEBUG oslo_vmware.api [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831645, 'name': PowerOnVM_Task, 'duration_secs': 1.262208} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.055843] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 726.056319] env[62914]: INFO nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Took 13.51 seconds to spawn the instance on the hypervisor. 
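
Editorial note: the inventory record logged above for provider f2f7a014-852b-4b37-9610-c5761f4b0175 matches the usual Placement capacity calculation, capacity = (total - reserved) * allocation_ratio per resource class. As a rough sanity check against the exact values in that entry (illustrative only, not code from Nova or Placement):

    # Illustrative only: recompute Placement-style capacity from the
    # inventory values shown in the log entry above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(rc, "capacity:", capacity)
    # Expected output: VCPU capacity: 192, MEMORY_MB capacity: 196078,
    # DISK_GB capacity: 200 (a single allocation is further capped by max_unit).
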
[ 726.056647] env[62914]: DEBUG nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 726.057849] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7987d69-6d50-4922-a871-9005f9bdfbde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.102304] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 726.102854] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-37331f7f-f5ef-49d1-9b15-19dc39e937aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.121304] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831653, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.123489] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 726.123489] env[62914]: value = "task-4831655" [ 726.123489] env[62914]: _type = "Task" [ 726.123489] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.144124] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831655, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.220170] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.225904] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.226462] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 726.237133] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.032s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.237518] env[62914]: DEBUG nova.objects.instance [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 726.284331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.284331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.284331] env[62914]: DEBUG nova.network.neutron [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 726.383580] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831649, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.496217] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831650, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.582708] env[62914]: INFO nova.compute.manager [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Took 52.40 seconds to build instance. [ 726.628663] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831653, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.641097] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831655, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.694196] env[62914]: DEBUG nova.network.neutron [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Updated VIF entry in instance network info cache for port 6fb0a3a2-3560-49e6-85bc-32473f0b2985. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 726.694661] env[62914]: DEBUG nova.network.neutron [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Updating instance_info_cache with network_info: [{"id": "6fb0a3a2-3560-49e6-85bc-32473f0b2985", "address": "fa:16:3e:45:7a:af", "network": {"id": "12d5462a-a891-4c1e-9a7c-2c1ded5fd046", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1701586483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92e65b92475a4d0ab57070424a4eebff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb0a3a2-35", "ovs_interfaceid": "6fb0a3a2-3560-49e6-85bc-32473f0b2985", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.725554] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.746959] env[62914]: DEBUG nova.compute.utils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.748757] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 726.748975] env[62914]: DEBUG nova.network.neutron [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 726.886353] env[62914]: DEBUG oslo_vmware.api [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831649, 'name': PowerOnVM_Task, 'duration_secs': 1.359972} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.886893] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 726.889303] env[62914]: INFO nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Took 11.60 seconds to spawn the instance on the hypervisor. [ 726.889303] env[62914]: DEBUG nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 726.889303] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbfcf89e-7af4-48a3-8d3d-17dfed65e615 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.893850] env[62914]: DEBUG nova.network.neutron [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 726.972135] env[62914]: DEBUG nova.policy [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dda0f12511324c52b00236c75b33acc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e2b3db08ee34716be135d72b3ddda8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 726.991931] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831650, 'name': Rename_Task, 'duration_secs': 1.271037} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.993047] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 726.994455] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e521f4dc-cfe6-4b0e-a958-ecb61979d26d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.006333] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 727.006333] env[62914]: value = "task-4831656" [ 727.006333] env[62914]: _type = "Task" [ 727.006333] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.027692] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.084626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bbaecb67-3608-42b4-9048-ef660eecc298 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.140s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.115692] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831653, 'name': CreateVM_Task, 'duration_secs': 1.410773} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.119138] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 727.119138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.119138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.119138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 727.119138] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b20c689f-85fd-42cc-85ef-ef26bf21d32b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.125755] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 727.125755] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d04a99-a343-55c6-4405-0dffb48f2c20" [ 727.125755] env[62914]: _type = "Task" [ 727.125755] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.143019] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d04a99-a343-55c6-4405-0dffb48f2c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.144929] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831655, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.199195] env[62914]: DEBUG oslo_concurrency.lockutils [req-686c149d-ef83-4688-97f9-f68b50b69493 req-878e8a0e-c492-44ff-ba85-811ccaad2c24 service nova] Releasing lock "refresh_cache-1342d15d-fbef-4709-adf6-f827bc13d3ca" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.222532] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.167757} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.222858] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.223821] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1479e3e1-11bf-427f-a380-7b6d67c7a0cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.256076] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.259630] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85019fd4-d1c4-4308-b369-b3787ddea22c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.276166] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3572788b-7a4f-4f2e-92d2-127580c6f47d tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.039s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.277592] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 727.281087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.469s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.281506] env[62914]: DEBUG nova.objects.instance [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lazy-loading 'resources' on Instance uuid 567f3d61-ed30-47d9-aebc-77c9392be506 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.289649] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 727.289649] env[62914]: value = "task-4831657" [ 727.289649] env[62914]: _type = "Task" [ 727.289649] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.301981] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.434204] env[62914]: INFO nova.compute.manager [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Took 50.23 seconds to build instance. 
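
Editorial note: most of the entries in this stretch are the oslo.vmware task-polling pattern: a *_Task method (CloneVM_Task, ReconfigVM_Task, PowerOnVM_Task, ...) is invoked, and the API is then polled until the task completes, which is what produces the repeated "Task: {...} progress is N%" lines followed by a "completed successfully" entry with duration_secs. A minimal sketch of that pattern, with hypothetical names rather than the actual oslo.vmware or Nova code:

    import time

    # Hypothetical sketch of the poll-until-complete loop behind the
    # "Waiting for the task" / "progress is N%" entries above.
    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # assumed to return a dict-like task status
            if info["state"] == "success":
                return info  # caller logs duration/result on completion
            if info["state"] == "error":
                raise RuntimeError("task failed: %r" % info.get("error"))
            print("Task progress is %s%%" % info.get("progress", 0))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

In the log, one such loop per task is what emits the successive progress entries (0%, 25%, 94%, ...) for tasks like task-4831655 above before the final "completed successfully" record.
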
[ 727.494738] env[62914]: DEBUG nova.network.neutron [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Updating instance_info_cache with network_info: [{"id": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "address": "fa:16:3e:9d:cf:6f", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c2416f-fd", "ovs_interfaceid": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.506258] env[62914]: DEBUG nova.compute.manager [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Received event network-vif-plugged-f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 727.506358] env[62914]: DEBUG oslo_concurrency.lockutils [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] Acquiring lock "8b83f82b-42f7-4f33-abc4-ff278d343309-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.506597] env[62914]: DEBUG oslo_concurrency.lockutils [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.507213] env[62914]: DEBUG oslo_concurrency.lockutils [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.507213] env[62914]: DEBUG nova.compute.manager [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] No waiting events found dispatching network-vif-plugged-f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 727.507322] env[62914]: WARNING nova.compute.manager 
[req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Received unexpected event network-vif-plugged-f2c2416f-fd5d-479b-b87b-5c00e77e23d0 for instance with vm_state building and task_state spawning. [ 727.507477] env[62914]: DEBUG nova.compute.manager [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Received event network-changed-f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 727.507630] env[62914]: DEBUG nova.compute.manager [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Refreshing instance network info cache due to event network-changed-f2c2416f-fd5d-479b-b87b-5c00e77e23d0. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 727.507858] env[62914]: DEBUG oslo_concurrency.lockutils [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] Acquiring lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.520420] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.587754] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 727.645225] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d04a99-a343-55c6-4405-0dffb48f2c20, 'name': SearchDatastore_Task, 'duration_secs': 0.034929} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.648317] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.648317] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.648885] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.649647] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.649647] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.650082] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831655, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.650314] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a34f54d1-b10f-4148-a6b6-945cacab0d49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.670309] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.670309] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 727.672306] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82babc89-57ec-4e47-ad5d-46cb419f0742 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.686046] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 727.686046] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ebbaab-2b93-96a2-319c-e8a14f2752be" [ 727.686046] env[62914]: _type = "Task" [ 727.686046] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.693138] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ebbaab-2b93-96a2-319c-e8a14f2752be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.804648] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.849780] env[62914]: DEBUG nova.network.neutron [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Successfully created port: c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.939018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7533229e-56ee-49e7-9ff2-0525b7e80b1d tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "4496a977-30b2-4323-a561-884633958cdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.290s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.998648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.999038] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Instance network_info: |[{"id": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "address": "fa:16:3e:9d:cf:6f", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c2416f-fd", "ovs_interfaceid": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 728.000825] env[62914]: DEBUG oslo_concurrency.lockutils [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] Acquired lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.000825] env[62914]: DEBUG nova.network.neutron [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Refreshing network info cache for port f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 728.001305] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:cf:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2c2416f-fd5d-479b-b87b-5c00e77e23d0', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 728.011737] env[62914]: DEBUG oslo.service.loopingcall [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 728.012386] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 728.016150] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30b1ee3d-e100-49b6-a4c1-058c677934de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.043279] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 78%. 
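The "Instance VIF info" entry just above is derived nearly field-for-field from the cached network_info dumped before it: the bridge becomes the network name, the port's MAC and UUID carry over, and the NSX logical-switch id in the port details becomes an OpaqueNetwork reference. The mapping below is an illustration of that correspondence only, not Nova's actual nova.virt.vmwareapi.vif code:

def vif_info_from_network_info(vif):
    """Map one cached network_info entry (as logged above) to VIF-info form."""
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],        # "br-int"
        'mac_address': vif['address'],                    # "fa:16:3e:9d:cf:6f"
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],                            # Neutron port UUID
        'vif_model': 'vmxnet3',                           # hardware model chosen for this image
    }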
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.045193] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 728.045193] env[62914]: value = "task-4831658" [ 728.045193] env[62914]: _type = "Task" [ 728.045193] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.057933] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831658, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.114215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.149197] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831655, 'name': CloneVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.199862] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ebbaab-2b93-96a2-319c-e8a14f2752be, 'name': SearchDatastore_Task, 'duration_secs': 0.04571} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.201848] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97d4e7e7-6749-4001-9126-61451f50face {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.215922] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 728.215922] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526000bc-dfcd-32d8-7403-20513455a649" [ 728.215922] env[62914]: _type = "Task" [ 728.215922] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.226961] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526000bc-dfcd-32d8-7403-20513455a649, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.249008] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe3ea0a-c4c5-40e0-b87e-b995ae4b20cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.259206] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc9b0e0-35a0-4efd-aafe-23c73e847351 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.294515] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 728.301369] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b14f95-ca95-471f-9194-496475464bc5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.315292] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a815b9-2262-448c-affb-158ad2092c29 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.318185] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.330970] env[62914]: DEBUG nova.compute.provider_tree [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.347115] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:24:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd1b046e-6be2-4ac8-bbb2-0adf61fb18f6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1771667993',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.347373] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.347549] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.347751] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.347902] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.348106] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.349075] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 728.349075] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.349075] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.349075] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.349075] env[62914]: DEBUG nova.virt.hardware [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.350618] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb01d0ed-e6e5-4a1c-9de1-1703cf2d7926 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.360466] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8350b0-508a-4e68-aa21-7799cd7c77de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.526802] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.558625] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831658, 'name': CreateVM_Task, 'duration_secs': 0.497384} completed successfully. 
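The CPU-topology lines above reduce to a simple enumeration: the flavor and image set no limits (0 means unset, so the 65536 defaults apply), and the only way to factor one vCPU into sockets x cores x threads is 1 x 1 x 1, hence "Got 1 possible topologies". A toy re-derivation of that count, not Nova's nova.virt.hardware implementation:

import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found


print(possible_topologies(1))   # [(1, 1, 1)] -- matches the single topology logged above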
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.558625] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 728.559943] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.559943] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.559943] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 728.560075] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3212eb22-d952-40ec-baae-3e582fdb8b6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.566942] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 728.566942] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52109e5d-19b4-e768-cb75-e0022cb413ef" [ 728.566942] env[62914]: _type = "Task" [ 728.566942] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.577631] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52109e5d-19b4-e768-cb75-e0022cb413ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.647073] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831655, 'name': CloneVM_Task, 'duration_secs': 2.100224} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.647612] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Created linked-clone VM from snapshot [ 728.648949] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9161a57f-7580-47de-8329-d6d843c40912 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.658122] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Uploading image 8a856bbf-685d-4149-8dc7-f87421df1e7b {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 728.699171] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 728.699171] env[62914]: value = "vm-941899" [ 728.699171] env[62914]: _type = "VirtualMachine" [ 728.699171] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 728.699528] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e37612f6-69e9-460f-bcf4-8de711df5542 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.708362] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease: (returnval){ [ 728.708362] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ead80b-0f8b-a33c-b2e6-3aed2aa1ddf0" [ 728.708362] env[62914]: _type = "HttpNfcLease" [ 728.708362] env[62914]: } obtained for exporting VM: (result){ [ 728.708362] env[62914]: value = "vm-941899" [ 728.708362] env[62914]: _type = "VirtualMachine" [ 728.708362] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 728.709183] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the lease: (returnval){ [ 728.709183] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ead80b-0f8b-a33c-b2e6-3aed2aa1ddf0" [ 728.709183] env[62914]: _type = "HttpNfcLease" [ 728.709183] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 728.717503] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 728.717503] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ead80b-0f8b-a33c-b2e6-3aed2aa1ddf0" [ 728.717503] env[62914]: _type = "HttpNfcLease" [ 728.717503] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 728.730908] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526000bc-dfcd-32d8-7403-20513455a649, 'name': SearchDatastore_Task, 'duration_secs': 0.041257} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.730908] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.730908] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 1342d15d-fbef-4709-adf6-f827bc13d3ca/1342d15d-fbef-4709-adf6-f827bc13d3ca.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 728.730908] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ba78944-91e7-4903-903e-163df8f709c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.739959] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 728.739959] env[62914]: value = "task-4831660" [ 728.739959] env[62914]: _type = "Task" [ 728.739959] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.750096] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.810042] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 14%. 
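The CopyVirtualDisk_Task entry above copies the cached image VMDK from devstack-image-cache_base into the new instance's folder on the same datastore. A hedged sketch of that single call using plain oslo.vmware primitives; session and dc_ref are assumed to already exist, and Nova's own helper for this step is nova.virt.vmwareapi.vm_util.copy_virtual_disk:

def copy_cached_image(session, dc_ref, image_id, instance_uuid, datastore='datastore1'):
    """Copy the cached image VMDK to the instance folder and wait for the task."""
    source = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)
    dest = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
    disk_mgr = session.vim.service_content.virtualDiskManager
    task_ref = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source, sourceDatacenter=dc_ref,
        destName=dest, destDatacenter=dc_ref)
    # Polled like any other task; shows up as CopyVirtualDisk_Task progress lines.
    session.wait_for_task(task_ref)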
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.836381] env[62914]: DEBUG nova.scheduler.client.report [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.030447] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.089242] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52109e5d-19b4-e768-cb75-e0022cb413ef, 'name': SearchDatastore_Task, 'duration_secs': 0.014795} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.089242] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "968cbfbe-1570-48d6-890d-c7a680855574" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.089242] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "968cbfbe-1570-48d6-890d-c7a680855574" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.090362] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.090724] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.091091] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a 
tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.091160] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.091434] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.092467] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1769649-a1ea-4bb4-a1a9-b6bb8d2d82eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.103901] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.104185] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 729.106411] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdf5a859-e863-4378-bac4-e736d554d18e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.114166] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 729.114166] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522169b8-798b-f17a-6966-4b8d4fcd7316" [ 729.114166] env[62914]: _type = "Task" [ 729.114166] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.127973] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522169b8-798b-f17a-6966-4b8d4fcd7316, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.210219] env[62914]: DEBUG nova.network.neutron [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Updated VIF entry in instance network info cache for port f2c2416f-fd5d-479b-b87b-5c00e77e23d0. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 729.210439] env[62914]: DEBUG nova.network.neutron [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Updating instance_info_cache with network_info: [{"id": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "address": "fa:16:3e:9d:cf:6f", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c2416f-fd", "ovs_interfaceid": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.227512] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 729.227512] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ead80b-0f8b-a33c-b2e6-3aed2aa1ddf0" [ 729.227512] env[62914]: _type = "HttpNfcLease" [ 729.227512] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 729.230096] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 729.230096] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ead80b-0f8b-a33c-b2e6-3aed2aa1ddf0" [ 729.230096] env[62914]: _type = "HttpNfcLease" [ 729.230096] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 729.230096] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b39433-7a72-487e-978b-8265fe9e94bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.243295] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfc423-9075-43f9-cc38-9072de248176/disk-0.vmdk from lease info. 
{{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 729.243295] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfc423-9075-43f9-cc38-9072de248176/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 729.318401] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.327490] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.347378] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.066s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.351432] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.313s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.351798] env[62914]: DEBUG nova.objects.instance [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lazy-loading 'resources' on Instance uuid 5bba4aa5-2b92-42b4-8516-72298a99f0e6 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 729.362989] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-392b4370-5979-4081-8a44-d9aca9936c20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.398289] env[62914]: INFO nova.scheduler.client.report [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Deleted allocations for instance 567f3d61-ed30-47d9-aebc-77c9392be506 [ 729.527013] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 78%. 
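The lease entries above follow the standard HttpNfcLease export flow used to stream the linked-clone snapshot up to the image service: ExportVm returns a lease, the session waits for it to move from "initializing" to "ready", and the VMDK download URL is read out of the lease's info property. A minimal sketch of just the lease/URL part, using only generic oslo.vmware calls (vm_ref is assumed; the actual streaming in nova.virt.vmwareapi.images is omitted):

from oslo_vmware import vim_util


def get_export_vmdk_url(session, vm_ref):
    """Open an export lease on a VM and return the first disk's download URL."""
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # Blocks until the lease state is ready (the "_poll_lease ... is ready" lines).
    session.wait_for_lease_ready(lease)
    lease_info = session.invoke_api(
        vim_util, 'get_object_property', session.vim, lease, 'info')
    for device_url in lease_info.deviceUrl:
        if device_url.disk:                    # skip non-disk device URLs
            return device_url.url              # e.g. ".../nfc/<lease id>/disk-0.vmdk"
    raise RuntimeError('export lease exposed no disk URLs')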
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.598323] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 729.636202] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522169b8-798b-f17a-6966-4b8d4fcd7316, 'name': SearchDatastore_Task, 'duration_secs': 0.012147} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.637265] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f730acf-ef1c-4905-86e3-cb595aafca85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.647236] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 729.647236] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c07aed-7ab3-1b79-5086-31100d29a54d" [ 729.647236] env[62914]: _type = "Task" [ 729.647236] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.667854] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c07aed-7ab3-1b79-5086-31100d29a54d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.721387] env[62914]: DEBUG oslo_concurrency.lockutils [req-27338cbb-27e8-4416-bb4c-e578395913a9 req-9f4aca73-0d1d-460b-abce-66c77cc13d09 service nova] Releasing lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.762844] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.821125] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.856286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "bf2e9634-66ee-4b6a-a148-bc77420d793f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.856286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.856286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "bf2e9634-66ee-4b6a-a148-bc77420d793f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.856286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.856286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.859512] env[62914]: INFO nova.compute.manager [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Terminating instance [ 729.863410] env[62914]: DEBUG nova.compute.manager [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Start destroying the instance on the hypervisor. 
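The Acquiring/acquired/released lines that recur throughout this trace, including the instance and "-events" locks just above, come from oslo.concurrency: the synchronized decorator's wrapper (the inner frames) and the lock() context manager (the lock frames) both log the waited and held durations at DEBUG. A self-contained example of the decorator form, with an illustrative lock name rather than one of Nova's:

from oslo_concurrency import lockutils


@lockutils.synchronized('demo-resources')
def claim(instance_uuid):
    # Runs with the named lock held; concurrent callers block here, and the
    # wrapper logs how long each one waited for and then held the lock.
    return 'claimed %s' % instance_uuid


print(claim('00000000-0000-0000-0000-000000000000'))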
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 729.865118] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 729.865118] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ca8723-430f-41f7-92af-85b0ec186e63 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.875977] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 729.879252] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0bd27e4-084f-4494-92e7-0153883404e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.887486] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 729.887486] env[62914]: value = "task-4831661" [ 729.887486] env[62914]: _type = "Task" [ 729.887486] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.899870] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.908257] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d313a15-2e7a-4899-b76d-e8f04132a31c tempest-InstanceActionsNegativeTestJSON-382922751 tempest-InstanceActionsNegativeTestJSON-382922751-project-member] Lock "567f3d61-ed30-47d9-aebc-77c9392be506" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.177s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.031705] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.122676] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.165109] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c07aed-7ab3-1b79-5086-31100d29a54d, 'name': SearchDatastore_Task, 'duration_secs': 0.015601} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.165567] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.166077] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/8b83f82b-42f7-4f33-abc4-ff278d343309.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 730.166428] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ad6f136-e10a-4299-926c-f6a635475d88 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.182492] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 730.182492] env[62914]: value = "task-4831662" [ 730.182492] env[62914]: _type = "Task" [ 730.182492] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.206684] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831662, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.260934] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831660, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.320118] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.386074] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6f1655-ec5b-40fa-9d8a-c582e5b34ae3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.406967] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fd1b08-4de8-4244-880c-14c4dfe4a555 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.411240] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831661, 'name': PowerOffVM_Task, 'duration_secs': 0.400694} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.412213] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 730.412440] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 730.413120] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37d5a746-848c-4ded-94e7-4f2e9e378a47 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.452532] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214074a7-752f-40d9-b9f5-dc59befb294a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.465912] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6085a028-0fed-4cd0-a6ef-694aa578d0df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.490866] env[62914]: DEBUG nova.compute.provider_tree [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.531048] env[62914]: DEBUG oslo_vmware.api [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831656, 'name': PowerOnVM_Task, 
'duration_secs': 3.238986} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.531418] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 730.531637] env[62914]: DEBUG nova.compute.manager [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 730.532571] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5872f2e0-974b-4e79-9d32-8075b48dd0e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.651542] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 730.652160] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 730.652160] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Deleting the datastore file [datastore1] bf2e9634-66ee-4b6a-a148-bc77420d793f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 730.652706] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24917e32-e0b9-4df4-b574-511296c526ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.661315] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for the task: (returnval){ [ 730.661315] env[62914]: value = "task-4831664" [ 730.661315] env[62914]: _type = "Task" [ 730.661315] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.678023] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831664, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.684256] env[62914]: DEBUG nova.network.neutron [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Successfully updated port: c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.701243] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831662, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.761902] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831660, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.888835} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.762819] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 1342d15d-fbef-4709-adf6-f827bc13d3ca/1342d15d-fbef-4709-adf6-f827bc13d3ca.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 730.763945] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 730.764553] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c524ed12-3b44-4268-b632-dc7eee2906cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.779079] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 730.779079] env[62914]: value = "task-4831665" [ 730.779079] env[62914]: _type = "Task" [ 730.779079] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.792042] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831665, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.826590] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831657, 'name': ReconfigVM_Task, 'duration_secs': 3.216257} completed successfully. 
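
The CopyVirtualDisk_Task followed by "Extending root virtual disk to 1048576" above is the image-backed spawn path: the cached image VMDK is copied into the instance directory and then grown to the flavor's root size (1048576 KB, i.e. a 1 GiB root disk). Here is a local-filesystem sketch of that ordering; copy_disk(), extend_disk() and build_root_disk() are hypothetical helpers, not the driver's own functions.

    import shutil
    from pathlib import Path

    KIB = 1024

    def copy_disk(cached_vmdk: Path, instance_vmdk: Path) -> None:
        # Local stand-in for VirtualDiskManager.CopyVirtualDisk_Task: clone the
        # cached image disk into the instance's directory.
        instance_vmdk.parent.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(cached_vmdk, instance_vmdk)

    def extend_disk(vmdk: Path, new_capacity_kb: int) -> None:
        # Local stand-in for VirtualDiskManager.ExtendVirtualDisk_Task: grow the
        # file to the flavor's root disk size.
        with open(vmdk, "r+b") as f:
            f.truncate(new_capacity_kb * KIB)

    def build_root_disk(cache_dir: Path, instance_dir: Path,
                        image_id: str, instance_uuid: str, root_kb: int) -> Path:
        cached = cache_dir / image_id / f"{image_id}.vmdk"
        target = instance_dir / instance_uuid / f"{instance_uuid}.vmdk"
        copy_disk(cached, target)       # step 1: copy from the image cache
        extend_disk(target, root_kb)    # step 2: extend (1048576 KB = 1 GiB)
        return target
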
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.827248] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.828777] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c6d4a85-174e-46a6-a37c-651012b5fc8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.838652] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 730.838652] env[62914]: value = "task-4831666" [ 730.838652] env[62914]: _type = "Task" [ 730.838652] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.852159] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831666, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.994445] env[62914]: DEBUG nova.scheduler.client.report [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.059996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.173384] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831664, 'name': DeleteDatastoreFile_Task} progress is 0%. 
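
The "Inventory has not changed for provider ... based on inventory data" entries show the report client diffing the freshly computed inventory against what it last sent to Placement and skipping the update when they match. A toy illustration of that comparison, using the same inventory shape as the log; inventory_changed() is a hypothetical helper.

    current = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def inventory_changed(reported: dict, proposed: dict) -> bool:
        """True when any resource class or field differs, i.e. an update call
        to Placement is actually needed."""
        return reported != proposed

    proposed = {rc: dict(fields) for rc, fields in current.items()}
    if not inventory_changed(current, proposed):
        print("Inventory has not changed for provider; skipping update")
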
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.193831] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.193977] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.194148] env[62914]: DEBUG nova.network.neutron [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 731.203552] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.929653} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.203992] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/8b83f82b-42f7-4f33-abc4-ff278d343309.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 731.204240] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.204539] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-865e4c2e-716a-42fe-ad39-3145af2313cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.216169] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 731.216169] env[62914]: value = "task-4831667" [ 731.216169] env[62914]: _type = "Task" [ 731.216169] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.229756] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831667, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.296956] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.322871} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.297199] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.298198] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315cfcc6-4081-4bc7-8807-a0e8368a01d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.337023] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 1342d15d-fbef-4709-adf6-f827bc13d3ca/1342d15d-fbef-4709-adf6-f827bc13d3ca.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.337023] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a79faa8c-d3ea-4a81-9421-1c7333d121ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.360960] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831666, 'name': Rename_Task, 'duration_secs': 0.427016} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.363117] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 731.363740] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 731.363740] env[62914]: value = "task-4831668" [ 731.363740] env[62914]: _type = "Task" [ 731.363740] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.364556] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0263d19-b8cf-42ae-9c8b-7ef81d15d098 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.380189] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831668, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.382154] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 731.382154] env[62914]: value = "task-4831669" [ 731.382154] env[62914]: _type = "Task" [ 731.382154] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.393060] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831669, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.500577] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.504997] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.227s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.506891] env[62914]: INFO nova.compute.claims [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.561520] env[62914]: INFO nova.scheduler.client.report [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Deleted allocations for instance 5bba4aa5-2b92-42b4-8516-72298a99f0e6 [ 731.674086] env[62914]: DEBUG oslo_vmware.api [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Task: {'id': task-4831664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.657433} completed successfully. 
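
The 'Lock "..." acquired ... waited Ns' / '"released" ... held Ns' pairs above come from oslo.concurrency's lock instrumentation around named locks such as "compute_resources". A simplified, stdlib-only sketch of that pattern; timed_lock() is a hypothetical stand-in for lockutils, with the timings taken from time.monotonic().

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                        # named in-process locks, keyed like "compute_resources"
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, caller):
        """Report how long the caller queued for a named lock and how long it
        held it, in the same shape as the log lines above."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        queued = time.monotonic()
        with lock:
            waited = time.monotonic() - queued
            print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
            held_since = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_since
                print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))

    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)               # the resource claim happens while the lock is held
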
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.675271] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.675271] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 731.675271] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 731.675271] env[62914]: INFO nova.compute.manager [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Took 1.81 seconds to destroy the instance on the hypervisor. [ 731.675271] env[62914]: DEBUG oslo.service.loopingcall [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.675271] env[62914]: DEBUG nova.compute.manager [-] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 731.675271] env[62914]: DEBUG nova.network.neutron [-] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.726529] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081427} completed successfully. 
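
The teardown logged above follows a fixed order: power the VM off, unregister it from vCenter, delete its datastore directory, then deallocate the Neutron ports inside a retrying looping call. A condensed sketch of that ordering; every helper below is a hypothetical stand-in for the corresponding call visible in the log.

    def power_off(vm):
        print("Powered off the VM")                           # PowerOffVM_Task

    def unregister(vm):
        print("Unregistered the VM")                          # UnregisterVM

    def delete_datastore_dir(vm):
        print("Deleted contents of the VM from datastore")    # DeleteDatastoreFile_Task

    def deallocate_network(vm):
        # Nova runs this inside a retrying looping call
        # (_deallocate_network_with_retries).
        print("Deallocating network for instance")

    def destroy_instance(vm):
        power_off(vm)              # guest must be off before it can be unregistered
        unregister(vm)             # remove the VM from the vCenter inventory
        delete_datastore_dir(vm)   # free the instance directory on the datastore
        deallocate_network(vm)     # release Neutron ports last
        print("Instance destroyed")

    destroy_instance("bf2e9634-66ee-4b6a-a148-bc77420d793f")
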
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.726898] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.728268] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678c6e6a-8b88-4649-88d4-2a521825c264 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.751224] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/8b83f82b-42f7-4f33-abc4-ff278d343309.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.751596] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb3e4adf-818f-410f-8c7f-548fdfece050 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.768672] env[62914]: DEBUG nova.network.neutron [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.776035] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 731.776035] env[62914]: value = "task-4831670" [ 731.776035] env[62914]: _type = "Task" [ 731.776035] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.786219] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831670, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.878987] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831668, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.894245] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831669, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.075743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4374b69f-6e6d-4acf-aadf-a498a24b8ec4 tempest-ServerShowV257Test-2042433145 tempest-ServerShowV257Test-2042433145-project-member] Lock "5bba4aa5-2b92-42b4-8516-72298a99f0e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.823s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.291835] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831670, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.367626] env[62914]: DEBUG nova.network.neutron [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.393482] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831668, 'name': ReconfigVM_Task, 'duration_secs': 0.717674} completed successfully. 
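
The network_info blob recorded above is a list of VIF dictionaries, and the later VIF-info and devname values are read straight out of it. A small example of pulling the interesting fields from that exact structure (trimmed to the keys used here):

    network_info = [{
        "id": "c148a862-a6a8-4c52-b1df-8e764ee00e94",
        "address": "fa:16:3e:88:52:2b",
        "network": {
            "id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4}],
            }],
        },
        "details": {"nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c"},
        "devname": "tapc148a862-a6",
        "vnic_type": "normal",
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], vif["devname"], fixed_ips)
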
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.395255] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 1342d15d-fbef-4709-adf6-f827bc13d3ca/1342d15d-fbef-4709-adf6-f827bc13d3ca.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.396967] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc1eb494-1647-4149-b29a-05ac53c6344e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.403707] env[62914]: DEBUG oslo_vmware.api [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831669, 'name': PowerOnVM_Task, 'duration_secs': 1.003123} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.404676] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 732.404942] env[62914]: INFO nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Took 14.38 seconds to spawn the instance on the hypervisor. [ 732.405323] env[62914]: DEBUG nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 732.406143] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab0e4f3-61cc-4b69-a7cb-42ed63f04b6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.413655] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 732.413655] env[62914]: value = "task-4831671" [ 732.413655] env[62914]: _type = "Task" [ 732.413655] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.432102] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831671, 'name': Rename_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.794223] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831670, 'name': ReconfigVM_Task, 'duration_secs': 0.614585} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.794223] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/8b83f82b-42f7-4f33-abc4-ff278d343309.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.797623] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f463c12c-21b1-48cc-a10b-046120a8853f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.808459] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 732.808459] env[62914]: value = "task-4831672" [ 732.808459] env[62914]: _type = "Task" [ 732.808459] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.819363] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831672, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.882639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.882987] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Instance network_info: |[{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 732.883455] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:52:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c148a862-a6a8-4c52-b1df-8e764ee00e94', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.891961] env[62914]: DEBUG oslo.service.loopingcall [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.895288] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 732.896065] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdd7a5bf-0351-4dcd-997e-8349298cd9c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.923616] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.923616] env[62914]: value = "task-4831673" [ 732.923616] env[62914]: _type = "Task" [ 732.923616] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.931255] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831671, 'name': Rename_Task, 'duration_secs': 0.285076} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.941703] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 732.942561] env[62914]: INFO nova.compute.manager [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Took 54.25 seconds to build instance. [ 732.947023] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-841555da-97d9-46fa-b496-6a16e85aa2ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.953973] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831673, 'name': CreateVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.957046] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 732.957046] env[62914]: value = "task-4831674" [ 732.957046] env[62914]: _type = "Task" [ 732.957046] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.970763] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831674, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.996606] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb6ad27-3e2b-4b41-b2e6-98925579f6c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.009796] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5b8842-bb8b-402e-b56e-f4f27989e248 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.059566] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da1768d-fe7f-4a39-9375-3abb30adc836 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.070448] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8f9d69-f75e-4cd3-b477-8b5e01ecdac7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.090662] env[62914]: DEBUG nova.network.neutron [-] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.092071] env[62914]: DEBUG nova.compute.provider_tree [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.331042] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831672, 'name': Rename_Task, 'duration_secs': 0.217831} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.331042] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 733.331265] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9ddf3cc-5ffb-4586-90e2-4773c69589f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.342296] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 733.342296] env[62914]: value = "task-4831675" [ 733.342296] env[62914]: _type = "Task" [ 733.342296] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.360063] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831675, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.443172] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831673, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.450372] env[62914]: DEBUG oslo_concurrency.lockutils [None req-38084650-faa8-4068-b8b7-78c70fda253b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "5a704020-921e-4ede-9fd9-b745c027a158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.341s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.469300] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831674, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.595061] env[62914]: INFO nova.compute.manager [-] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Took 1.92 seconds to deallocate network for instance. [ 733.596425] env[62914]: DEBUG nova.scheduler.client.report [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.857444] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831675, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.942143] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831673, 'name': CreateVM_Task, 'duration_secs': 0.519982} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.944175] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 733.944175] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.944384] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.945782] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 733.945989] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94e688ad-9ac2-4e79-9d14-50ae22227d9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.954404] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 733.954404] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb2ed6-8a8d-bf5d-5ecc-2856ff03a394" [ 733.954404] env[62914]: _type = "Task" [ 733.954404] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.972844] env[62914]: DEBUG oslo_vmware.api [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831674, 'name': PowerOnVM_Task, 'duration_secs': 0.903668} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.976649] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 733.980471] env[62914]: INFO nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Took 12.27 seconds to spawn the instance on the hypervisor. 
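
The lock/SearchDatastore_Task sequence above is the image-cache check: the driver serializes on a lock named after the cached image path, then looks for the VMDK on the datastore and only fetches it on a miss. A rough local-filesystem analogue, assuming oslo.concurrency is installed; CACHE_ROOT, ensure_cached_image() and the fetch_from_glance callable are hypothetical.

    from pathlib import Path

    from oslo_concurrency import lockutils   # assumes oslo.concurrency is installed

    CACHE_ROOT = Path("/tmp/devstack-image-cache_base")   # hypothetical local path

    def ensure_cached_image(image_id: str, fetch_from_glance) -> Path:
        """Serialize on a per-image lock, then reuse the cached disk if it is
        already present -- a local analogue of the devstack-image-cache_base
        handling in the log."""
        cached = CACHE_ROOT / image_id / f"{image_id}.vmdk"
        with lockutils.lock(f"devstack-image-cache_base/{image_id}"):
            if cached.exists():                    # search found the cached disk
                return cached
            cached.parent.mkdir(parents=True, exist_ok=True)
            fetch_from_glance(image_id, cached)    # cache miss: download once
            return cached

    # Example (hypothetical downloader):
    # ensure_cached_image("75c43660-b52b-450e-ba36-0f721e14bc6c", download_image)
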
[ 733.980704] env[62914]: DEBUG nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 733.981097] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb2ed6-8a8d-bf5d-5ecc-2856ff03a394, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.981969] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb792f8b-6597-4500-a08e-b57fd9af75e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.113215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.113937] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.114529] env[62914]: DEBUG nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 734.117556] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.672s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.117834] env[62914]: DEBUG nova.objects.instance [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lazy-loading 'resources' on Instance uuid 82aab17d-a6d0-48cf-a59a-fbef7d402894 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 734.251136] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fb26f-37af-7039-9303-6531b9839012/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 734.252656] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2de1db2-dacb-4198-b662-9e466709a42f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.264018] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fb26f-37af-7039-9303-6531b9839012/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 734.264018] env[62914]: ERROR oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fb26f-37af-7039-9303-6531b9839012/disk-0.vmdk due to incomplete transfer. [ 734.264018] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-98d75083-4157-407a-807e-213e62e3fedd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.276015] env[62914]: DEBUG oslo_vmware.rw_handles [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fb26f-37af-7039-9303-6531b9839012/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 734.276015] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Uploaded image 43e1e672-0912-4810-8ce8-f395286e9ba9 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 734.277838] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 734.278272] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e9387f6d-46a1-403c-851f-4c0e8ab835f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.287579] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 734.287579] env[62914]: value = "task-4831676" [ 734.287579] env[62914]: _type = "Task" [ 734.287579] env[62914]: } to complete. 
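
The ERROR/abort pair above reflects the read-handle close rule: if the HTTP NFC lease is still "ready" but not all bytes were transferred, the lease is aborted rather than completed before the handle is closed. A compact sketch of that decision; FakeLease and close_read_handle() are stand-ins, not the oslo.vmware rw_handles API.

    class FakeLease:
        """Tiny stand-in for the vSphere HTTP NFC lease object."""
        state = "ready"

        def abort(self):
            print("Aborting lease due to incomplete transfer.")

        def complete(self):
            print("Lease completed.")

    def close_read_handle(lease, bytes_read, bytes_expected):
        # Mirror of the close logic in the log: a lease that is still 'ready'
        # with bytes outstanding gets aborted, not completed.
        if lease.state == "ready":
            if bytes_read < bytes_expected:
                lease.abort()          # incomplete transfer
            else:
                lease.complete()       # all bytes read
        print("Closed VMDK read handle.")

    close_read_handle(FakeLease(), bytes_read=512, bytes_expected=1024)
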
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.298964] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831676, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.357435] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831675, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.471438] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb2ed6-8a8d-bf5d-5ecc-2856ff03a394, 'name': SearchDatastore_Task, 'duration_secs': 0.019052} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.471790] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.472123] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.472522] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.472896] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.473141] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.473631] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-666fe469-e863-4802-afe3-5f3266fb48e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.487837] 
env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.488552] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 734.488952] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9b00563-64f0-4142-94fd-2a00bcb84769 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.505456] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 734.505456] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e7df00-ee12-5775-443e-fd9bc1f3559e" [ 734.505456] env[62914]: _type = "Task" [ 734.505456] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.512299] env[62914]: INFO nova.compute.manager [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Took 51.01 seconds to build instance. [ 734.525119] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e7df00-ee12-5775-443e-fd9bc1f3559e, 'name': SearchDatastore_Task, 'duration_secs': 0.015021} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.526139] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa59d4dc-dd5c-4452-b639-f4cfef62f4f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.533665] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 734.533665] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5269e044-f61c-e816-60f3-3281072c9b16" [ 734.533665] env[62914]: _type = "Task" [ 734.533665] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.545138] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5269e044-f61c-e816-60f3-3281072c9b16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.628073] env[62914]: DEBUG nova.compute.utils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 734.629835] env[62914]: DEBUG nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Not allocating networking since 'none' was specified. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 734.808777] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831676, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.861352] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831675, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.017123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-93690a7c-7983-444f-9eca-053381ea7dad tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.519s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.048618] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5269e044-f61c-e816-60f3-3281072c9b16, 'name': SearchDatastore_Task, 'duration_secs': 0.015276} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.049423] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.049423] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 735.053176] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a928b5c-0697-4c21-9f6b-209339e82158 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.065408] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 735.065408] env[62914]: value = "task-4831677" [ 735.065408] env[62914]: _type = "Task" [ 735.065408] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.080133] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831677, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.125288] env[62914]: DEBUG nova.compute.manager [req-388dbdfe-e0b2-4d1c-b0d7-69349a2c4ff0 req-9951871a-999e-4d7c-9e11-1021a3cde71c service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Received event network-vif-plugged-c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 735.125288] env[62914]: DEBUG oslo_concurrency.lockutils [req-388dbdfe-e0b2-4d1c-b0d7-69349a2c4ff0 req-9951871a-999e-4d7c-9e11-1021a3cde71c service nova] Acquiring lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.125519] env[62914]: DEBUG oslo_concurrency.lockutils [req-388dbdfe-e0b2-4d1c-b0d7-69349a2c4ff0 req-9951871a-999e-4d7c-9e11-1021a3cde71c service nova] Lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.125806] env[62914]: DEBUG oslo_concurrency.lockutils [req-388dbdfe-e0b2-4d1c-b0d7-69349a2c4ff0 req-9951871a-999e-4d7c-9e11-1021a3cde71c service nova] Lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.125982] env[62914]: DEBUG nova.compute.manager [req-388dbdfe-e0b2-4d1c-b0d7-69349a2c4ff0 req-9951871a-999e-4d7c-9e11-1021a3cde71c service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] No waiting events found dispatching network-vif-plugged-c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 735.126227] env[62914]: WARNING nova.compute.manager [req-388dbdfe-e0b2-4d1c-b0d7-69349a2c4ff0 req-9951871a-999e-4d7c-9e11-1021a3cde71c service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Received unexpected event network-vif-plugged-c148a862-a6a8-4c52-b1df-8e764ee00e94 for instance with vm_state building and task_state spawning. [ 735.132819] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b19f3f-2244-4a68-81f2-0073e64d4421 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.133719] env[62914]: DEBUG nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 735.143402] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170b1019-e797-4d5c-ab4f-19715649bbfb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.193793] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c60c4fa-f889-4910-93b0-c964740615e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.206965] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84cdadf-2e17-4cdd-b3ab-039e497ae5c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.238693] env[62914]: DEBUG nova.compute.provider_tree [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.305055] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831676, 'name': Destroy_Task, 'duration_secs': 0.595717} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.305055] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroyed the VM [ 735.305414] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 735.309266] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-84b70f33-7ed9-4f5f-b82f-bb1f95b33f27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.318218] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 735.318218] env[62914]: value = "task-4831678" [ 735.318218] env[62914]: _type = "Task" [ 735.318218] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.332051] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831678, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.363559] env[62914]: DEBUG oslo_vmware.api [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831675, 'name': PowerOnVM_Task, 'duration_secs': 1.675977} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.363894] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 735.364120] env[62914]: INFO nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Took 11.87 seconds to spawn the instance on the hypervisor. [ 735.364372] env[62914]: DEBUG nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 735.365186] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f7cf52-70e1-409f-bcf9-b16b2d067f06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.581854] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831677, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.662462] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.663067] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "2d48056c-d38f-4be1-b28b-71da14607870" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.533236] env[62914]: DEBUG nova.scheduler.client.report [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.540762] env[62914]: DEBUG nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 736.544120] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 736.546712] env[62914]: DEBUG nova.compute.manager [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Received event network-changed-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 736.546920] env[62914]: DEBUG nova.compute.manager [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Refreshing instance network info cache due to event network-changed-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 736.547138] env[62914]: DEBUG oslo_concurrency.lockutils [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] Acquiring lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.547282] env[62914]: DEBUG oslo_concurrency.lockutils [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] Acquired lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.547438] env[62914]: DEBUG nova.network.neutron [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Refreshing network info cache for port 8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.561370] env[62914]: INFO nova.compute.manager [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Took 52.26 seconds to build instance. [ 736.571498] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831677, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.577719] env[62914]: DEBUG oslo_vmware.api [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831678, 'name': RemoveSnapshot_Task, 'duration_secs': 0.834975} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.577719] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 736.577719] env[62914]: INFO nova.compute.manager [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 16.46 seconds to snapshot the instance on the hypervisor. 
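The SearchDatastore_Task, CopyVirtualDisk_Task and RemoveSnapshot_Task entries above all follow oslo.vmware's invoke-then-wait pattern: the SOAP call returns a Task managed object (wait_for_task, api.py:397), which is then polled until completion (_poll_task, api.py:434, producing the "progress is N%" lines). A minimal illustrative sketch of that pattern, not taken from this run: the host, credentials and datastore paths below are placeholders, and the datacenter references for CopyVirtualDisk_Task are omitted for brevity.

    from oslo_vmware import api as vmware_api

    # Placeholder vCenter endpoint and credentials; task_poll_interval roughly
    # matches the ~0.5 s spacing of the progress polls seen in the log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the SOAP call and returns the Task object;
    # wait_for_task() polls it until it reaches 'success' or raises on error.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] devstack-image-cache_base/<image>/<image>.vmdk',
        destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk')
    session.wait_for_task(task)
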
[ 736.593346] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 736.593608] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 736.593766] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.593949] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 736.594109] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.594262] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 736.594508] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 736.594725] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 736.594913] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 736.595119] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 736.595304] env[62914]: DEBUG nova.virt.hardware [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 736.597132] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cd51e4-755f-49e1-b723-6e62f4f176c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.608069] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba84f92b-6ad3-45ca-8723-e73cd13750bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.628177] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.634408] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Creating folder: Project (32ab7832cc3b44eeb8aae779ab03d1eb). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 736.635798] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3c1c4ca-ea22-4a80-b434-7568dbdb1f1c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.653487] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Created folder: Project (32ab7832cc3b44eeb8aae779ab03d1eb) in parent group-v941773. [ 736.653487] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Creating folder: Instances. Parent ref: group-v941902. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 736.653487] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aba8da5c-06b2-4375-8f32-02e6e4fae26d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.664897] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Created folder: Instances in parent group-v941902. 
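The recurring Acquiring lock / acquired / "released" entries (for example around "compute_resources" and the devstack-image-cache_base datastore paths) are emitted by oslo.concurrency's lockutils wrappers: the decorator's inner function (lockutils.py:402/407/421) and the lock() context manager (lockutils.py:310/313/331). A minimal sketch of both forms; the lock names mirror the log, while the decorated function and image id are hypothetical.

    from oslo_concurrency import lockutils

    # Decorator form: wraps the call in a named in-process semaphore and logs
    # the acquire / "released" lines with the waited/held timings at DEBUG.
    @lockutils.synchronized('compute_resources')
    def claim_resources_example():
        pass  # placeholder for work done while holding the lock

    claim_resources_example()

    # Context-manager form, which produces the Acquiring/Acquired/Releasing
    # lines logged from lockutils.py:310/313/331 above.
    with lockutils.lock('[datastore2] devstack-image-cache_base/<image-id>'):
        pass  # placeholder for the image-cache work guarded by the lock
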
[ 736.664950] env[62914]: DEBUG oslo.service.loopingcall [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.665167] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 736.665393] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6080a71d-8787-4229-96cd-39b90a417a0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.689974] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.689974] env[62914]: value = "task-4831681" [ 736.689974] env[62914]: _type = "Task" [ 736.689974] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.703404] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831681, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.063806] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.945s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.066395] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831677, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.669723} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.070266] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.712s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.074195] env[62914]: INFO nova.compute.claims [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.076079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9e9c3fd6-f02a-4790-a631-b8a9811bc51a tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.789s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.077042] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 737.077042] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.078035] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4a90e09-7238-4d54-a598-69316741acb6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.082601] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.082871] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.083103] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 
tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.084506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.084506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.092284] env[62914]: INFO nova.scheduler.client.report [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Deleted allocations for instance 82aab17d-a6d0-48cf-a59a-fbef7d402894 [ 737.092784] env[62914]: INFO nova.compute.manager [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Terminating instance [ 737.096066] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 737.096066] env[62914]: value = "task-4831682" [ 737.096066] env[62914]: _type = "Task" [ 737.096066] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.097060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.100884] env[62914]: DEBUG nova.compute.manager [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 737.100884] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 737.106022] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14577c1a-d55a-46fb-8f3d-8b775feb8673 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.116507] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.118841] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 737.119145] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7812553-af61-4e55-a0a2-b73886948d00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.128289] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 737.128289] env[62914]: value = "task-4831683" [ 737.128289] env[62914]: _type = "Task" [ 737.128289] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.138786] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831683, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.186649] env[62914]: DEBUG nova.compute.manager [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Found 3 images (rotation: 2) {{(pid=62914) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 737.188237] env[62914]: DEBUG nova.compute.manager [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Rotating out 1 backups {{(pid=62914) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 737.188237] env[62914]: DEBUG nova.compute.manager [None req-f104d765-9b27-4625-ae64-c3e03ab93c99 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleting image c7276d8e-d0f8-4d5c-b179-a978d8bd33c6 {{(pid=62914) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 737.202777] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831681, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.536528] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.536528] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.612778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-412151e1-6e45-4d79-8d97-242295f24285 tempest-ServersTestFqdnHostnames-2028362432 tempest-ServersTestFqdnHostnames-2028362432-project-member] Lock "82aab17d-a6d0-48cf-a59a-fbef7d402894" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.429s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.620967] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.654581] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831683, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.710298] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831681, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.833559] env[62914]: DEBUG nova.network.neutron [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Updated VIF entry in instance network info cache for port 8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 737.833977] env[62914]: DEBUG nova.network.neutron [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Updating instance_info_cache with network_info: [{"id": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "address": "fa:16:3e:de:7a:42", "network": {"id": "ecf0d16a-6826-48e3-a9ed-8114774cf045", "bridge": "br-int", "label": "tempest-ServersTestJSON-657617940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5214037507114e7eafb35779261bde06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eb60abb-6f", "ovs_interfaceid": "8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.853467] env[62914]: INFO nova.compute.manager [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Rebuilding instance [ 737.915040] env[62914]: DEBUG nova.compute.manager [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 737.916176] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fdcd1b-51dc-49f5-a170-69a7c367ffa8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.040187] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 738.124197] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.149479] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831683, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.210877] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831681, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.236446] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "e061304c-998b-4331-b60d-809916844a6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.236446] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "e061304c-998b-4331-b60d-809916844a6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.339804] env[62914]: DEBUG oslo_concurrency.lockutils [req-aade26e6-d309-4aa7-b78c-ef28b20cd4d2 req-903c9c9c-de5b-4e03-a648-7a01604d06c5 service nova] Releasing lock "refresh_cache-4496a977-30b2-4323-a561-884633958cdf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.430235] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 738.437707] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db040ed6-2792-4df9-9fe4-fb86e8927bc5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.447695] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 738.447695] env[62914]: value = "task-4831684" [ 738.447695] env[62914]: _type = "Task" [ 738.447695] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.469690] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831684, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.583878] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.622250] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.649723] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831683, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.656776] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8d68b1-55e1-4ac1-8d80-a8d8cde6219d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.669476] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2355da-27b1-49df-b2c8-01ec52a4ae91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.709795] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f5f5ff-920d-47c4-876b-7aea9af0671e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.722695] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b97e45-1537-4c46-9a43-fb9e848b8f06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.727031] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831681, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.740552] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 738.743504] env[62914]: DEBUG nova.compute.provider_tree [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.958680] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831684, 'name': PowerOffVM_Task, 'duration_secs': 0.251305} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.958931] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 738.959417] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 738.960024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec81329-1a5f-4206-a2e9-0654a76a80d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.973046] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 738.973262] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7dc2770f-3fc8-40b4-b9f3-4e778ac4ae30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.005926] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 739.006571] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 739.006682] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Deleting the datastore file [datastore1] cead3557-080d-4956-a957-cac449bb69f6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.007181] env[62914]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e25b0be8-08bb-44d7-96ec-698722cb6d23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.018031] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 739.018031] env[62914]: value = "task-4831686" [ 739.018031] env[62914]: _type = "Task" [ 739.018031] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.032888] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831686, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.125772] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.935692} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.128667] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.128829] env[62914]: DEBUG nova.compute.manager [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Received event network-changed-c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 739.129350] env[62914]: DEBUG nova.compute.manager [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Refreshing instance network info cache due to event network-changed-c148a862-a6a8-4c52-b1df-8e764ee00e94. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 739.129350] env[62914]: DEBUG oslo_concurrency.lockutils [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] Acquiring lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.129608] env[62914]: DEBUG oslo_concurrency.lockutils [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] Acquired lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.129608] env[62914]: DEBUG nova.network.neutron [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Refreshing network info cache for port c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.132178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffff1e89-1d94-4871-b3bc-2e6af35542da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.138293] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.138726] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.139080] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.139686] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.140128] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.142663] env[62914]: INFO nova.compute.manager [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Terminating instance [ 739.144801] env[62914]: DEBUG nova.compute.manager [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 739.145161] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 739.149860] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ba1b42-5a56-4fbb-88fc-082a22966791 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.163703] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "1d74504f-b641-42c6-a420-c80614d69b23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.163969] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "1d74504f-b641-42c6-a420-c80614d69b23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.187683] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.193478] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d49e9152-a1be-496b-8ee8-38c6f965629c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.210751] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Powering off the VM {{(pid=62914) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.216265] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75509e4f-724f-4fd8-a59f-49d286d08f38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.218118] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831683, 'name': PowerOffVM_Task, 'duration_secs': 1.97752} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.221857] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.222315] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 739.225448] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfc423-9075-43f9-cc38-9072de248176/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 739.230022] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-935d7c0d-8155-4284-b147-15c41d33cfb1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.230022] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d969f000-5b07-49f8-b9fa-ea8d37bc6c57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.235884] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 739.235884] env[62914]: value = "task-4831687" [ 739.235884] env[62914]: _type = "Task" [ 739.235884] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.236583] env[62914]: DEBUG oslo_vmware.api [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 739.236583] env[62914]: value = "task-4831688" [ 739.236583] env[62914]: _type = "Task" [ 739.236583] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.247269] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831681, 'name': CreateVM_Task, 'duration_secs': 2.38462} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.256235] env[62914]: DEBUG nova.scheduler.client.report [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.260870] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 739.261593] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfc423-9075-43f9-cc38-9072de248176/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 739.261776] env[62914]: ERROR oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfc423-9075-43f9-cc38-9072de248176/disk-0.vmdk due to incomplete transfer. 
[ 739.263271] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.263477] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.263908] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 739.264303] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-de28803c-b944-4677-a14d-a32c75e30dce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.274612] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d86ef756-ff07-405f-93d9-1f112632a9fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.279685] env[62914]: DEBUG oslo_vmware.api [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.279685] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831687, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.285871] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 739.285871] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524cf21a-b1e8-09c0-72b7-017874576b3c" [ 739.285871] env[62914]: _type = "Task" [ 739.285871] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.288338] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.300644] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524cf21a-b1e8-09c0-72b7-017874576b3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.302956] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfc423-9075-43f9-cc38-9072de248176/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 739.302956] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Uploaded image 8a856bbf-685d-4149-8dc7-f87421df1e7b to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 739.305328] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 739.305328] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-72a45284-32dd-4fb6-b2bf-b00f29fdf2ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.312637] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 739.312637] env[62914]: value = "task-4831690" [ 739.312637] env[62914]: _type = "Task" [ 739.312637] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.323346] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831690, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.339309] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 739.339647] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 739.340352] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleting the datastore file [datastore2] bfdd7711-d081-42cf-9e4a-2df556d1b72e {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.340352] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16e0235f-13ef-4ccd-a06c-1df8897d3910 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.348134] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 739.348134] env[62914]: value = "task-4831691" [ 739.348134] env[62914]: _type = "Task" [ 739.348134] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.358949] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831691, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.533084] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294129} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.533084] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.533232] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 739.533446] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 739.764260] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.765543] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 739.768372] env[62914]: DEBUG oslo_vmware.api [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831688, 'name': PowerOffVM_Task, 'duration_secs': 0.284807} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.775594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.514s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.777868] env[62914]: INFO nova.compute.claims [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.784492] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.784740] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 739.785350] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831687, 'name': ReconfigVM_Task, 'duration_secs': 0.508814} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.785984] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23445f9e-285d-4de8-8aa9-bf95d293389e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.788892] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.789061] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c19284cc-0d51-4152-bf3f-b4430cf82a9b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.813247] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524cf21a-b1e8-09c0-72b7-017874576b3c, 'name': SearchDatastore_Task, 'duration_secs': 0.027864} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.814983] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.815279] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.816931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.816931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.816931] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.816931] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 739.816931] env[62914]: value = "task-4831693" [ 739.816931] env[62914]: _type = "Task" [ 739.816931] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.822044] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e470b752-06af-4c1d-887d-5b4630d1420e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.838375] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831690, 'name': Destroy_Task, 'duration_secs': 0.493822} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.844255] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Destroyed the VM [ 739.844563] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 739.846989] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831693, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.846989] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.846989] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 739.847136] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ec9a9664-ba67-4255-ab2c-a6414fba17ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.849282] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac1f3793-dbf6-4c70-bda9-3858847278af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.860794] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 739.860794] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527f5dd2-7907-90b7-0f6f-033a2e55097f" [ 739.860794] env[62914]: _type = "Task" [ 739.860794] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.864381] env[62914]: DEBUG oslo_vmware.api [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250789} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.869386] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.869645] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 739.869835] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 739.870048] env[62914]: INFO nova.compute.manager [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Took 2.77 seconds to destroy the instance on the hypervisor. [ 739.870829] env[62914]: DEBUG oslo.service.loopingcall [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.870956] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 739.870956] env[62914]: value = "task-4831694" [ 739.870956] env[62914]: _type = "Task" [ 739.870956] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.873988] env[62914]: DEBUG nova.compute.manager [-] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 739.874259] env[62914]: DEBUG nova.network.neutron [-] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 739.883374] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 739.883625] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 739.883808] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleting the datastore file [datastore2] 1fa01184-1ed2-43de-bcbf-bd8658acc9f9 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.888703] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-042c68ae-2084-4369-91eb-aadad983cf31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.891501] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527f5dd2-7907-90b7-0f6f-033a2e55097f, 'name': SearchDatastore_Task, 'duration_secs': 0.017459} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.895804] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831694, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.896784] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cec13012-26b0-4674-b58c-e5f2d2b72c5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.901497] env[62914]: DEBUG oslo_vmware.api [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for the task: (returnval){ [ 739.901497] env[62914]: value = "task-4831695" [ 739.901497] env[62914]: _type = "Task" [ 739.901497] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.905185] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 739.905185] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5255f58d-4122-f1b6-c9c7-92bc6005ede7" [ 739.905185] env[62914]: _type = "Task" [ 739.905185] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.918387] env[62914]: DEBUG oslo_vmware.api [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831695, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.921957] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5255f58d-4122-f1b6-c9c7-92bc6005ede7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.247998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "1342d15d-fbef-4709-adf6-f827bc13d3ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.247998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.247998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "1342d15d-fbef-4709-adf6-f827bc13d3ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.247998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.248215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.250046] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "b477cd62-49c2-4e3c-98ea-b4154dda4986" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.250284] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.253906] env[62914]: INFO nova.compute.manager [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Terminating instance [ 740.256354] env[62914]: DEBUG nova.compute.manager [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 740.256354] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 740.257257] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589671bf-095b-41e7-b17a-0e84c3c55c80 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.265961] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 740.266321] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec242681-d5be-4d2b-82fa-6f9e5fb7365a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.271695] env[62914]: DEBUG nova.compute.utils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.276459] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: 
bc6da94e-4de8-4e56-a071-d04c5e5dad18] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 740.276459] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 740.277181] env[62914]: DEBUG oslo_vmware.api [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 740.277181] env[62914]: value = "task-4831696" [ 740.277181] env[62914]: _type = "Task" [ 740.277181] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.291565] env[62914]: DEBUG oslo_vmware.api [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.339386] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831693, 'name': Rename_Task, 'duration_secs': 0.286452} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.339869] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 740.340336] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca40021e-0536-46a1-80a5-05f747b8b3d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.349698] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 740.349698] env[62914]: value = "task-4831697" [ 740.349698] env[62914]: _type = "Task" [ 740.349698] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.359670] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831697, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.389156] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831694, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.423033] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5255f58d-4122-f1b6-c9c7-92bc6005ede7, 'name': SearchDatastore_Task, 'duration_secs': 0.015863} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.426520] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.426841] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 740.427189] env[62914]: DEBUG oslo_vmware.api [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Task: {'id': task-4831695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.43025} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.427429] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6230c635-e64a-47fa-8413-c0842a167520 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.429993] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.430248] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 740.430434] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 740.430637] env[62914]: INFO nova.compute.manager [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Took 1.29 seconds to destroy the instance on the hypervisor. [ 740.430893] env[62914]: DEBUG oslo.service.loopingcall [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.431923] env[62914]: DEBUG nova.compute.manager [-] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 740.432043] env[62914]: DEBUG nova.network.neutron [-] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.440081] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 740.440081] env[62914]: value = "task-4831698" [ 740.440081] env[62914]: _type = "Task" [ 740.440081] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.450340] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831698, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.587766] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.587766] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.588074] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.588074] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 740.588284] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.588394] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.588638] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 740.588829] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.589043] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 
tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.589227] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.590189] env[62914]: DEBUG nova.virt.hardware [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.592088] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345ea88d-e2e2-4601-9a05-431aa54cdd1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.604235] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d11363-a638-48c0-b31f-141384dadf27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.631090] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.633565] env[62914]: DEBUG oslo.service.loopingcall [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.633888] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.634182] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ae777ac-7261-43f4-9583-0c17b5f556aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.653310] env[62914]: DEBUG nova.network.neutron [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updated VIF entry in instance network info cache for port c148a862-a6a8-4c52-b1df-8e764ee00e94. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 740.653781] env[62914]: DEBUG nova.network.neutron [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.657328] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.657328] env[62914]: value = "task-4831699" [ 740.657328] env[62914]: _type = "Task" [ 740.657328] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.670288] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831699, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.708305] env[62914]: DEBUG nova.policy [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '100272696b464561889b452f7c318a34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7da23e8d3c044f178c224a3e40a346a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 740.778468] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 740.802506] env[62914]: DEBUG oslo_vmware.api [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831696, 'name': PowerOffVM_Task, 'duration_secs': 0.509572} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.803329] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 740.803591] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 740.804420] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b64a4945-c118-43a2-b935-f4b6529b4549 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.873291] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831697, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.895507] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831694, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.915881] env[62914]: INFO nova.compute.manager [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Rescuing [ 740.915881] env[62914]: DEBUG oslo_concurrency.lockutils [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.915881] env[62914]: DEBUG oslo_concurrency.lockutils [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.915881] env[62914]: DEBUG nova.network.neutron [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 740.957869] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831698, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.158450] env[62914]: DEBUG oslo_concurrency.lockutils [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] Releasing lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.158755] env[62914]: DEBUG nova.compute.manager [req-c82be7c4-62a0-4fab-a3fb-92ea78c8f65e req-f771f2e6-f65a-4fc0-a81d-4ec795208016 service nova] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] Received event network-vif-deleted-13fa09d1-da09-4a01-bc28-c7fd25efb778 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 741.175960] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831699, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.365602] env[62914]: DEBUG oslo_vmware.api [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831697, 'name': PowerOnVM_Task, 'duration_secs': 0.669586} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.366650] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 741.366910] env[62914]: INFO nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Took 13.07 seconds to spawn the instance on the hypervisor. [ 741.367100] env[62914]: DEBUG nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 741.367981] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944d2c66-4731-4a53-9d0d-097cc693d5bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.396580] env[62914]: DEBUG oslo_vmware.api [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831694, 'name': RemoveSnapshot_Task, 'duration_secs': 1.080949} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.397975] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 741.397975] env[62914]: INFO nova.compute.manager [None req-b40ae484-b277-4652-9bbc-d0784dbbbcbc tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Took 17.42 seconds to snapshot the instance on the hypervisor. [ 741.451732] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff47338-d2b7-49cc-88f5-79880b11f381 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.460983] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619741} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.461832] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 741.462025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.462277] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-245dc01a-aa7f-4c9b-8ac0-08bae5c9c9ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.468126] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0302b2e-4816-42fa-bb85-aa41934277fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.475054] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 741.475054] env[62914]: value = "task-4831701" [ 741.475054] env[62914]: _type = "Task" [ 741.475054] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.518042] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27db893f-4688-4e83-8575-4efd6ff384f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.522166] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.529144] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dd9969-b4ae-4e07-875c-3c691e864971 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.547250] env[62914]: DEBUG nova.compute.provider_tree [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.557974] env[62914]: DEBUG nova.network.neutron [-] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.671196] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831699, 'name': CreateVM_Task, 'duration_secs': 0.704342} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.671246] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 741.671670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.673495] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.673495] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.673495] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a42bb0a4-6a09-48d7-aab2-6b5f1d26cb15 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.678630] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 741.678630] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bc8890-ac6d-bddc-73c5-4111438e81bd" [ 741.678630] env[62914]: _type = "Task" [ 741.678630] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.687306] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bc8890-ac6d-bddc-73c5-4111438e81bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.798320] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 741.831996] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.831996] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.831996] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.831996] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.831996] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.832269] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.832315] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.832482] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.832981] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d 
tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.832981] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.832981] env[62914]: DEBUG nova.virt.hardware [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.834336] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1981fcb-444e-48b3-9f26-992cc07d1ead {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.850490] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2410feec-2bac-42ee-adc9-361c0c12784b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.910543] env[62914]: INFO nova.compute.manager [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Took 48.06 seconds to build instance. [ 741.987861] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093644} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.993317] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.994408] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b25321-2b4c-4c88-8173-98ccc0828888 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.030198] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.030198] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Successfully created port: e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.032793] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81141deb-f6cf-47eb-ab64-8db96bfc1bbe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.051653] env[62914]: DEBUG nova.network.neutron [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Updating instance_info_cache with network_info: [{"id": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "address": "fa:16:3e:9d:cf:6f", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c2416f-fd", "ovs_interfaceid": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.054204] env[62914]: DEBUG nova.scheduler.client.report [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 
tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 742.061107] env[62914]: INFO nova.compute.manager [-] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Took 2.19 seconds to deallocate network for instance. [ 742.064638] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 742.064638] env[62914]: value = "task-4831702" [ 742.064638] env[62914]: _type = "Task" [ 742.064638] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.079812] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831702, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.190619] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bc8890-ac6d-bddc-73c5-4111438e81bd, 'name': SearchDatastore_Task, 'duration_secs': 0.011815} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.191056] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.191380] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.191792] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.191892] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.192114] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.192406] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18fc9856-02fe-4d72-a92d-c216c6e0f75e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.203019] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.203239] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 742.204781] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91b0896c-3252-48ba-a6c2-edb32f017335 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.211734] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 742.211734] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520c9e48-6da2-16e4-3e6c-ca4ed1e0487e" [ 742.211734] env[62914]: _type = "Task" [ 742.211734] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.221475] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520c9e48-6da2-16e4-3e6c-ca4ed1e0487e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.322563] env[62914]: DEBUG nova.network.neutron [-] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.415426] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b77847f6-e37d-4610-92c4-4e5b7bade7d5 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 56.616s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.533756] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "54185b06-7ccb-4740-a6ee-213bbfa6365b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.534401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.535493] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "54185b06-7ccb-4740-a6ee-213bbfa6365b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.535493] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.536062] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.542264] env[62914]: INFO nova.compute.manager [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Terminating instance [ 742.544659] env[62914]: DEBUG nova.compute.manager [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 742.544872] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 742.546163] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0112e1f-0454-446c-9a38-510a30a6d675 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.559120] env[62914]: DEBUG oslo_concurrency.lockutils [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.562028] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.786s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.568524] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 742.574130] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.754s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.576736] env[62914]: INFO nova.compute.claims [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.580506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.582014] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 742.588626] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bae34b5a-b30d-4154-9c6d-48cf348c4db8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.604132] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.609529] env[62914]: DEBUG nova.compute.manager [req-8d79e518-747d-456c-9e83-c10dc26733f2 req-22ff7b01-a90d-4541-a34b-17457d5e2be0 service nova] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Received event network-vif-deleted-d1bc2a9f-2310-438f-a8f0-d1e1f60ef641 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 742.610461] env[62914]: DEBUG oslo_vmware.api [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 742.610461] env[62914]: value = "task-4831703" [ 742.610461] env[62914]: _type = "Task" [ 742.610461] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.632736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.632736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.645735] env[62914]: DEBUG oslo_vmware.api [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.727323] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520c9e48-6da2-16e4-3e6c-ca4ed1e0487e, 'name': SearchDatastore_Task, 'duration_secs': 0.013174} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.729193] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d1a2db0-8a6f-4e7b-8af7-958bef5931e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.739990] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 742.739990] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d1a887-dce5-e96b-4f93-ed61f4ec5aa5" [ 742.739990] env[62914]: _type = "Task" [ 742.739990] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.756472] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d1a887-dce5-e96b-4f93-ed61f4ec5aa5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.807461] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 742.807826] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 742.808023] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Deleting the datastore file [datastore1] 1342d15d-fbef-4709-adf6-f827bc13d3ca {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.808353] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fba9d8f-3136-4a14-8c6a-ba80390f6849 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.817602] env[62914]: DEBUG oslo_vmware.api [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for the task: (returnval){ [ 742.817602] env[62914]: value = "task-4831704" [ 742.817602] env[62914]: _type = "Task" [ 742.817602] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.827406] env[62914]: INFO nova.compute.manager [-] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Took 2.40 seconds to deallocate network for instance. [ 742.829074] env[62914]: DEBUG oslo_vmware.api [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.919375] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 743.051937] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Successfully created port: 04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.084108] env[62914]: DEBUG nova.compute.utils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.093043] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 743.093252] env[62914]: DEBUG nova.network.neutron [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 743.110921] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831702, 'name': ReconfigVM_Task, 'duration_secs': 0.714462} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.111278] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Reconfigured VM instance instance-0000002a to attach disk [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.112184] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11c1ff4e-9d4f-46be-8d46-137d6961dfd2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.123224] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 743.123224] env[62914]: value = "task-4831705" [ 743.123224] env[62914]: _type = "Task" [ 743.123224] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.144132] env[62914]: DEBUG oslo_vmware.api [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831703, 'name': PowerOffVM_Task, 'duration_secs': 0.42949} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.149237] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 743.149553] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 743.152694] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831705, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.153972] env[62914]: DEBUG nova.compute.manager [req-d1f5d75e-37f0-433d-bcb7-d4f5ccaf75f0 req-40332b85-7a5d-4e91-88f4-42a89b91cbc8 service nova] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Received event network-vif-deleted-770e30b2-2f05-4531-b9d0-6482b3d18b22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 743.154623] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 743.154875] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-168b6718-5101-49b3-870a-511f1a4d681d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.157222] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57e81320-db57-45d1-96e8-4c03a08c33c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.167455] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 743.167455] env[62914]: value = "task-4831706" [ 743.167455] env[62914]: _type = "Task" [ 743.167455] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.178960] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831706, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.230470] env[62914]: DEBUG nova.policy [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c808065add174d3ba7a5973fe26c755c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d3a3c33ee0c43e981a93d51f5779c10', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 743.244291] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 743.244291] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 743.244291] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Deleting the datastore file [datastore1] 54185b06-7ccb-4740-a6ee-213bbfa6365b {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.254426] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee00b808-07c7-4ec2-b08f-5634a047b95e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.265932] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d1a887-dce5-e96b-4f93-ed61f4ec5aa5, 'name': SearchDatastore_Task, 'duration_secs': 0.016482} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.265932] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.265932] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 743.266591] env[62914]: DEBUG oslo_vmware.api [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for the task: (returnval){ [ 743.266591] env[62914]: value = "task-4831708" [ 743.266591] env[62914]: _type = "Task" [ 743.266591] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.266591] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2be03b21-1914-49f2-8c56-e5df4989b812 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.291704] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 743.291704] env[62914]: value = "task-4831709" [ 743.291704] env[62914]: _type = "Task" [ 743.291704] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.291704] env[62914]: DEBUG oslo_vmware.api [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831708, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.305030] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.333323] env[62914]: DEBUG oslo_vmware.api [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Task: {'id': task-4831704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445488} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.336392] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.336673] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.336861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 743.337063] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 743.337242] env[62914]: INFO nova.compute.manager [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Took 3.08 seconds to destroy the instance on the hypervisor. [ 743.338237] env[62914]: DEBUG oslo.service.loopingcall [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.338237] env[62914]: DEBUG nova.compute.manager [-] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 743.338237] env[62914]: DEBUG nova.network.neutron [-] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 743.451992] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.589990] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 743.651023] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831705, 'name': Rename_Task, 'duration_secs': 0.200623} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.651791] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 743.653232] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f84ef4e2-46b9-4cee-b501-e8e3b84ccf1a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.671156] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 743.671156] env[62914]: value = "task-4831710" [ 743.671156] env[62914]: _type = "Task" [ 743.671156] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.701851] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831710, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.701851] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831706, 'name': PowerOffVM_Task, 'duration_secs': 0.311615} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.701851] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 743.705693] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e346130-0280-43e3-8893-9ee7fda464b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.734495] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9750cfc-a871-46d7-a348-4421384a0b65 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.788523] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 743.788523] env[62914]: DEBUG oslo_vmware.api [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Task: {'id': task-4831708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.43815} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.788523] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-663a76ae-5ff2-4105-a28a-a3310f09347e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.790390] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.790685] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 743.790876] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 743.790972] env[62914]: INFO nova.compute.manager [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Took 1.25 seconds to destroy the instance on the hypervisor. 
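The entries above trace the standard vmwareapi destroy path for instance 54185b06-7ccb-4740-a6ee-213bbfa6365b: PowerOffVM_Task, UnregisterVM, FileManager.DeleteDatastoreFile_Task on the instance directory, then network deallocation. As a rough sketch only, the core SOAP calls through oslo.vmware look roughly like the following; the endpoint, credentials, and the vm_ref/dc_ref/path arguments are placeholder assumptions, not values taken from this log, and the real driver resolves the managed-object references via the PropertyCollector.RetrievePropertiesEx queries seen throughout.

    from oslo_vmware import api

    def destroy_backing(session, vm_ref, dc_ref, ds_dir_path):
        """Unregister a VM, then delete its datastore directory (sketch)."""
        # UnregisterVM is a plain method, not a *_Task, so nothing is polled.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # FileManager.DeleteDatastoreFile_Task returns a task moref;
        # wait_for_task() is what produces the repeated
        # "_poll_task ... progress is N%" entries in this log.
        file_manager = session.vim.service_content.fileManager
        delete_task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_manager,
            name=ds_dir_path, datacenter=dc_ref)
        session.wait_for_task(delete_task)

    # Hypothetical session setup; a real one needs reachable vCenter credentials.
    # session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)

wait_for_task() blocks until TaskInfo.state reaches 'success' (or raises on error), which is why each task appears here first as a "Waiting for the task" entry and then as a "completed successfully" entry with a duration.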
[ 743.791240] env[62914]: DEBUG oslo.service.loopingcall [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.791852] env[62914]: DEBUG nova.compute.manager [-] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 743.791940] env[62914]: DEBUG nova.network.neutron [-] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 743.803360] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 743.803360] env[62914]: value = "task-4831711" [ 743.803360] env[62914]: _type = "Task" [ 743.803360] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.810322] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.820331] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 743.820586] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.820865] env[62914]: DEBUG oslo_concurrency.lockutils [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.821028] env[62914]: DEBUG oslo_concurrency.lockutils [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.821212] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.821743] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df289566-2c39-4bf4-b5a4-d2809f709681 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.836298] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.836495] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 743.840023] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31a456f9-2339-46a0-a03f-4dd9b26f15b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.843668] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 743.843668] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233e799-9630-d655-584a-dc85cc517ddd" [ 743.843668] env[62914]: _type = "Task" [ 743.843668] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.854239] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233e799-9630-d655-584a-dc85cc517ddd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.189976] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f70f4e1-3dd0-4420-8112-bebc32a17d49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.199741] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831710, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.203053] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4978654e-abc5-4ae1-9217-d203291c26bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.243703] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2285de98-54f8-4a8b-a4df-0289a81f905f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.257535] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c502528-8c3d-4745-ad47-63afeea16e0d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.277739] env[62914]: DEBUG nova.compute.provider_tree [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.310055] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.356033] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233e799-9630-d655-584a-dc85cc517ddd, 'name': SearchDatastore_Task, 'duration_secs': 0.015336} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.357134] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4571f104-f448-48d2-9a49-980eea1ed09d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.364076] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 744.364076] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527c893a-5f41-6872-060f-5d002bcf771c" [ 744.364076] env[62914]: _type = "Task" [ 744.364076] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.375625] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527c893a-5f41-6872-060f-5d002bcf771c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.604228] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 744.640774] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 744.641265] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 744.641882] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.641882] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 744.641882] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.642065] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 744.643578] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 744.643809] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 744.643993] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 744.644201] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 744.644981] env[62914]: DEBUG nova.virt.hardware [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 744.646013] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a9601c-9bee-47bd-9f01-8a775365a4e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.657775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51397601-0e7a-4694-9876-71ba5964ffb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.691343] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831710, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.751680] env[62914]: DEBUG nova.network.neutron [-] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.781047] env[62914]: DEBUG nova.scheduler.client.report [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 744.803714] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831709, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.836131] env[62914]: INFO nova.compute.manager [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Rebuilding instance [ 744.879186] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527c893a-5f41-6872-060f-5d002bcf771c, 'name': SearchDatastore_Task, 'duration_secs': 0.011274} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.879693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.879856] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. 
{{(pid=62914) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 744.880510] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52af1c4f-02cf-4cfd-a672-06ba4130db44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.888947] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 744.888947] env[62914]: value = "task-4831712" [ 744.888947] env[62914]: _type = "Task" [ 744.888947] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.906904] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.911852] env[62914]: DEBUG nova.compute.manager [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 744.911852] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c09b29c-bec8-4e1b-ab64-a496c0068af3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.109478] env[62914]: DEBUG nova.network.neutron [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Successfully created port: 29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.190416] env[62914]: DEBUG oslo_vmware.api [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831710, 'name': PowerOnVM_Task, 'duration_secs': 1.381504} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.190747] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 745.190966] env[62914]: INFO nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Took 8.65 seconds to spawn the instance on the hypervisor. 
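The 8b83f82b rescue preparation above follows the image-cache pattern: the cached 75c43660-b52b-450e-ba36-0f721e14bc6c vmdk is guarded by a path-based lock (the lockutils Acquiring/Acquired/Releasing entries at lockutils.py:310/313/331), located with SearchDatastore_Task, and then copied into the instance directory as a -rescue.vmdk via VirtualDiskManager.CopyVirtualDisk_Task. A minimal sketch of that pattern, assuming an established oslo_vmware session and treating dc_ref and the two datastore paths as illustrative placeholders:

    from oslo_concurrency import lockutils

    def copy_cached_disk(session, dc_ref, cache_vmdk, dest_vmdk):
        """Copy a cached image vmdk to a per-instance path (sketch)."""
        # Serializing on the cache path is what emits the lockutils
        # "Acquiring/Acquired/Releasing lock" lines for the
        # devstack-image-cache_base vmdk seen above.
        with lockutils.lock(cache_vmdk):
            disk_manager = session.vim.service_content.virtualDiskManager
            copy_task = session.invoke_api(
                session.vim, 'CopyVirtualDisk_Task', disk_manager,
                sourceName=cache_vmdk, sourceDatacenter=dc_ref,
                destName=dest_vmdk, destDatacenter=dc_ref)
            # Each poll of this task corresponds to one of the
            # "CopyVirtualDisk_Task ... progress is N%" entries.
            session.wait_for_task(copy_task)

Serializing on the cached vmdk path keeps concurrent spawns of the same image from copying or pruning the cache entry out from under each other, which is why the lock shows up again for the cead3557 spawn in the surrounding entries.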
[ 745.191179] env[62914]: DEBUG nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 745.191985] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2d96b0-af6a-45a4-a09e-afd1842ef0e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.201927] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "12aa02f0-a232-427a-80ba-1faa12c4d43a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.205025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.205025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "12aa02f0-a232-427a-80ba-1faa12c4d43a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.205025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.205025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.205025] env[62914]: INFO nova.compute.manager [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Terminating instance [ 745.206354] env[62914]: DEBUG nova.compute.manager [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 745.206541] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.207347] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2071852b-e442-4025-93ab-f28153b6a890 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.217155] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 745.217698] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1560fefa-7ba0-4f90-874f-9956c4fa3524 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.259804] env[62914]: INFO nova.compute.manager [-] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Took 1.92 seconds to deallocate network for instance. [ 745.288275] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.288598] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 745.292961] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.424s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.293245] env[62914]: DEBUG nova.objects.instance [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lazy-loading 'resources' on Instance uuid 3eff61b1-b09c-4a04-821c-cefdc7be3f64 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.306581] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831709, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.521261} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.306789] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 745.307018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.307304] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cec31a78-1994-4649-ac53-951a68e5256e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.323296] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 745.323296] env[62914]: value = "task-4831714" [ 745.323296] env[62914]: _type = "Task" [ 745.323296] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.325111] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 745.325399] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 745.325590] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleting the datastore file [datastore2] 12aa02f0-a232-427a-80ba-1faa12c4d43a {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 745.329133] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85e8e193-9680-4a7e-8255-a3d6b11d741e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.342745] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831714, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.344810] env[62914]: DEBUG oslo_vmware.api [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 745.344810] env[62914]: value = "task-4831715" [ 745.344810] env[62914]: _type = "Task" [ 745.344810] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.359087] env[62914]: DEBUG oslo_vmware.api [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831715, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.403321] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831712, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.428836] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 745.429243] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e414443-06ab-4d3e-91a6-cc1e556c3d8f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.438225] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 745.438225] env[62914]: value = "task-4831716" [ 745.438225] env[62914]: _type = "Task" [ 745.438225] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.450339] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831716, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.721047] env[62914]: INFO nova.compute.manager [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Took 42.48 seconds to build instance. 
[ 745.773638] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.804212] env[62914]: DEBUG nova.compute.utils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.805691] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 745.805863] env[62914]: DEBUG nova.network.neutron [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 745.840115] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094113} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.840379] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.841936] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c10a673-a5db-4fa1-9fb1-82faad6eb565 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.878281] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.879621] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-068004c3-11af-420c-b25f-35ca9cb45260 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.903613] env[62914]: DEBUG oslo_vmware.api [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831715, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.914839] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.850376} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.916806] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. [ 745.917251] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 745.917251] env[62914]: value = "task-4831717" [ 745.917251] env[62914]: _type = "Task" [ 745.917251] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.918063] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda940b0-a448-40f1-9f28-346e7d6b201a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.955207] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.965130] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-921c11ae-dde3-4589-a8fc-2f506fc94b64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.978946] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.986531] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831716, 'name': PowerOffVM_Task, 'duration_secs': 0.479907} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.987882] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 745.987882] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.987882] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 745.987882] env[62914]: value = "task-4831718" [ 745.987882] env[62914]: _type = "Task" [ 745.987882] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.988871] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632f8303-99cb-4016-83cc-001abd68a8d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.004652] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831718, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.004652] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 746.004652] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7cd09da-a06b-4915-9fa4-74192bb5ff99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.028754] env[62914]: DEBUG nova.policy [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1305ed7d6c28421e93b3a8e31739df7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cda9ee54ad14f479838a54276dac349', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 746.097126] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 746.097364] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 746.097588] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.097923] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e769565-23b8-4859-b1cc-c8bd45e29fc0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.113684] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 746.113684] env[62914]: value = "task-4831720" [ 746.113684] env[62914]: _type = "Task" [ 746.113684] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.121951] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831720, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.225867] env[62914]: DEBUG oslo_concurrency.lockutils [None req-beaddb8c-139c-4ea8-b2e1-e6fad3cf2d8d tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "e6544702-bde7-4056-8a50-adede5c6a9d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.785s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.313052] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 746.368463] env[62914]: DEBUG oslo_vmware.api [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.670229} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.369944] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.369944] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 746.369944] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 746.369944] env[62914]: INFO nova.compute.manager [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 746.369944] env[62914]: DEBUG oslo.service.loopingcall [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.370286] env[62914]: DEBUG nova.compute.manager [-] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 746.370286] env[62914]: DEBUG nova.network.neutron [-] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 746.432467] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.455696] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8fb8be-2134-4a5e-a6b9-3e4bfba7716e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.471173] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13eb5ce-0a3c-407e-9171-5675d9df9652 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.476295] env[62914]: DEBUG nova.compute.manager [req-3faeaaea-769d-411d-b7a9-dd16b07bedb9 req-d1b925c2-7e5e-41b5-b22c-3a547be797a2 service nova] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Received event network-vif-deleted-6fb0a3a2-3560-49e6-85bc-32473f0b2985 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 746.521856] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0416ee32-20c2-410a-97b5-9418f4df3ec8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.533824] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eaebbf-d008-472a-8afd-3edf113cb3e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.538233] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831718, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.551415] env[62914]: DEBUG nova.compute.provider_tree [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.624059] env[62914]: DEBUG nova.network.neutron [-] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.628414] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.497891} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.628414] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.628414] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 746.628414] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 746.729439] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 746.935290] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831717, 'name': ReconfigVM_Task, 'duration_secs': 0.732832} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.935645] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Reconfigured VM instance instance-00000023 to attach disk [datastore2] cead3557-080d-4956-a957-cac449bb69f6/cead3557-080d-4956-a957-cac449bb69f6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.936318] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-180d0ce8-1ea3-4a71-a1a6-0475b6057ada {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.945095] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 746.945095] env[62914]: value = "task-4831721" [ 746.945095] env[62914]: _type = "Task" [ 746.945095] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.954181] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831721, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.031602] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831718, 'name': ReconfigVM_Task, 'duration_secs': 0.818643} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.031973] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.032749] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2ba06e-5fff-4dfa-8524-265cba6d640c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.063846] env[62914]: DEBUG nova.scheduler.client.report [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.074170] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54b68e0d-f304-43b5-8fa2-9ee38d307aed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.091103] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 747.091103] env[62914]: value = "task-4831722" [ 747.091103] env[62914]: _type = "Task" [ 747.091103] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.102011] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.129331] env[62914]: INFO nova.compute.manager [-] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Took 3.34 seconds to deallocate network for instance. 
[ 747.145860] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Successfully updated port: e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.304713] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.328268] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 747.366187] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.366450] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.366618] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.366849] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.367018] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.369294] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 
tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.369626] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.370205] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.371143] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.371371] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.371598] env[62914]: DEBUG nova.virt.hardware [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.373910] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f6068b-9b23-48a6-9ae6-508b9980317c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.389632] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbc7a39-fca3-428e-9d00-2d6ed48bd653 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.394939] env[62914]: DEBUG nova.network.neutron [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Successfully created port: c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.456666] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831721, 'name': Rename_Task, 'duration_secs': 0.31333} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.458334] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 747.458334] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-044c39d8-e295-4d48-9875-73f31b2cd231 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.465611] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Waiting for the task: (returnval){ [ 747.465611] env[62914]: value = "task-4831723" [ 747.465611] env[62914]: _type = "Task" [ 747.465611] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.474467] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831723, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.533059] env[62914]: DEBUG nova.network.neutron [-] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.588023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.292s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.593480] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.479s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.596199] env[62914]: INFO nova.compute.claims [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.613040] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831722, 'name': ReconfigVM_Task, 'duration_secs': 0.240616} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.613596] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 747.614194] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3639d83-e41f-44ce-93bd-c191f4875d16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.625689] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 747.625689] env[62914]: value = "task-4831724" [ 747.625689] env[62914]: _type = "Task" [ 747.625689] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.642306] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.642306] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831724, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.661812] env[62914]: INFO nova.scheduler.client.report [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Deleted allocations for instance 3eff61b1-b09c-4a04-821c-cefdc7be3f64 [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None 
req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.699162] env[62914]: DEBUG nova.virt.hardware [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.699162] env[62914]: DEBUG nova.compute.manager [req-e6b47253-80d9-41dc-9256-c3a757c1429f req-53ba3155-357c-4f51-bef6-987760989efa service nova] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Received event network-vif-deleted-ba331d08-bcc1-4f3e-b972-ee660dd6c0db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 747.700259] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ae610c-b55a-4f9f-9053-c928abf6c7ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.705432] env[62914]: DEBUG nova.network.neutron [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Successfully updated port: 29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.715294] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebde2d6-1ab1-49cc-b2de-923eebf3840d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.735562] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:fe:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48f699bf-5203-47d2-88d5-9747169234ea', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.748115] env[62914]: DEBUG oslo.service.loopingcall [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.749768] env[62914]: DEBUG oslo_concurrency.lockutils [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.750025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.750257] env[62914]: DEBUG nova.compute.manager [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 747.750469] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 747.751501] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0deba02-e4e9-487b-90eb-79db37eb4555 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.754830] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b667b9d7-3b27-4d61-bee3-8e18036559e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.774693] env[62914]: DEBUG nova.compute.manager [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 747.775680] env[62914]: DEBUG nova.objects.instance [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'flavor' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.778236] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.778236] env[62914]: value = "task-4831725" [ 747.778236] env[62914]: _type = "Task" [ 747.778236] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.790898] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831725, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.896507] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "b77a3d27-fe9f-49fc-95d1-15fe82762833" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.896909] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.976691] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831723, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.039839] env[62914]: INFO nova.compute.manager [-] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Took 1.67 seconds to deallocate network for instance. [ 748.141059] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831724, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.178936] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9557f973-d475-4bc2-a71e-1fa925bb05e4 tempest-ServersAdminNegativeTestJSON-1035945787 tempest-ServersAdminNegativeTestJSON-1035945787-project-member] Lock "3eff61b1-b09c-4a04-821c-cefdc7be3f64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.492s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.212064] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.212244] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquired lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.212376] env[62914]: DEBUG nova.network.neutron [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.282501] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 748.286302] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38c09bca-dd2f-4b60-8be8-0a449bef2d6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.296782] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831725, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.299051] env[62914]: DEBUG oslo_vmware.api [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 748.299051] env[62914]: value = "task-4831726" [ 748.299051] env[62914]: _type = "Task" [ 748.299051] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.311161] env[62914]: DEBUG oslo_vmware.api [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831726, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.480381] env[62914]: DEBUG oslo_vmware.api [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Task: {'id': task-4831723, 'name': PowerOnVM_Task, 'duration_secs': 0.739201} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.480381] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 748.480381] env[62914]: DEBUG nova.compute.manager [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 748.481218] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a9b841-eb38-4b54-ba05-c427b735ca3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.547445] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.642676] env[62914]: DEBUG oslo_vmware.api [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831724, 'name': PowerOnVM_Task, 'duration_secs': 0.780682} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.642676] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 748.648629] env[62914]: DEBUG nova.compute.manager [None req-50486fcd-1055-4306-bc1a-2d4bc25b18bf tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 748.649653] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9bf523-1200-4494-87d2-895a8a62edee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.776355] env[62914]: DEBUG nova.network.neutron [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.797046] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831725, 'name': CreateVM_Task, 'duration_secs': 0.555937} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.800910] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 748.802300] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.802420] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.802751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 748.810024] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ba19390-f098-4c40-94e9-65979c111d43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.813692] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 748.813692] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ed726f-63f3-7296-4666-974bd5193e01" [ 748.813692] env[62914]: _type = "Task" [ 748.813692] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.820162] env[62914]: DEBUG oslo_vmware.api [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831726, 'name': PowerOffVM_Task, 'duration_secs': 0.310712} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.823885] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 748.824119] env[62914]: DEBUG nova.compute.manager [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 748.825349] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e534f9f-5371-43ad-ac76-07b9aecee52e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.834038] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ed726f-63f3-7296-4666-974bd5193e01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.005187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.117614] env[62914]: DEBUG nova.network.neutron [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Updating instance_info_cache with network_info: [{"id": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "address": "fa:16:3e:86:1d:48", "network": {"id": "2ade9d92-db6f-492b-b2fc-c0b736f0a7e5", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1526613118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d3a3c33ee0c43e981a93d51f5779c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29704154-55", "ovs_interfaceid": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.165988] env[62914]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2216a9e7-d02e-459f-bcdc-0154c3b9a5b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.181987] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f008b1cf-71ac-4443-96b3-8cf411c8aaba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.224625] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827077db-de3b-4057-99b8-772604e98add {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.237682] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd7babc-f572-4d31-9dbe-7a4a6bfc25b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.254526] env[62914]: DEBUG nova.compute.provider_tree [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.334031] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ed726f-63f3-7296-4666-974bd5193e01, 'name': SearchDatastore_Task, 'duration_secs': 0.040491} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.335128] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.335128] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.335128] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.335128] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.335353] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.335603] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a0b1c0d-d067-4ae4-bdf7-346a0fbc43cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.346936] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.346936] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 749.347696] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54fbc51c-8ac1-4816-9564-4cbed13f3ef8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.352403] env[62914]: DEBUG oslo_concurrency.lockutils [None req-758f991e-6c0e-47bb-816a-a4e7a3932aac tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.602s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.359776] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 749.359776] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52457184-f4d8-8837-29e8-8764b4818e4c" [ 749.359776] env[62914]: _type = "Task" [ 749.359776] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.371170] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52457184-f4d8-8837-29e8-8764b4818e4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.626659] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Releasing lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.627014] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Instance network_info: |[{"id": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "address": "fa:16:3e:86:1d:48", "network": {"id": "2ade9d92-db6f-492b-b2fc-c0b736f0a7e5", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1526613118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d3a3c33ee0c43e981a93d51f5779c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29704154-55", "ovs_interfaceid": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 749.627489] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:1d:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29704154-556c-4ee1-a5d2-fafcd0ac6017', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.637056] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Creating folder: Project (0d3a3c33ee0c43e981a93d51f5779c10). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 749.637439] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6b93979-6f40-4b78-ad63-cca74ef3ebe2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.650498] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Created folder: Project (0d3a3c33ee0c43e981a93d51f5779c10) in parent group-v941773. [ 749.651213] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Creating folder: Instances. Parent ref: group-v941907. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 749.651532] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e473c02-37de-4120-8d14-1eeca74cea69 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.664383] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Created folder: Instances in parent group-v941907. [ 749.667483] env[62914]: DEBUG oslo.service.loopingcall [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.667483] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 749.667483] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b9bd2c8-f824-4574-834b-a23301fc6589 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.690554] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.690554] env[62914]: value = "task-4831729" [ 749.690554] env[62914]: _type = "Task" [ 749.690554] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.701670] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831729, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.758410] env[62914]: DEBUG nova.scheduler.client.report [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.872949] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52457184-f4d8-8837-29e8-8764b4818e4c, 'name': SearchDatastore_Task, 'duration_secs': 0.029521} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.873869] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9619cd41-a354-48a2-9c61-e945b4bd9e13 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.881564] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 749.881564] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529aa5cd-91b0-352c-643f-8a12e9ef8984" [ 749.881564] env[62914]: _type = "Task" [ 749.881564] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.894176] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529aa5cd-91b0-352c-643f-8a12e9ef8984, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.920437] env[62914]: DEBUG nova.compute.manager [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 750.082218] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.082494] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.204098] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831729, 'name': CreateVM_Task, 'duration_secs': 0.50923} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.204292] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 750.205376] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.205551] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.205880] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 750.206267] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f509654d-dfd2-42f4-9359-59b86a12bfa2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.211948] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 
tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 750.211948] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52481b56-aad5-06e3-2be2-a76704e76bd4" [ 750.211948] env[62914]: _type = "Task" [ 750.211948] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.225897] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52481b56-aad5-06e3-2be2-a76704e76bd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.266559] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.673s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.269447] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 750.271284] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.149s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.272783] env[62914]: INFO nova.compute.claims [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.395718] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529aa5cd-91b0-352c-643f-8a12e9ef8984, 'name': SearchDatastore_Task, 'duration_secs': 0.026642} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.396036] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.396562] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 750.396859] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6399beeb-9cb7-4893-bb9a-16d0afd3248d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.405415] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 750.405415] env[62914]: value = "task-4831730" [ 750.405415] env[62914]: _type = "Task" [ 750.405415] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.417662] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831730, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.457557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.701932] env[62914]: DEBUG nova.network.neutron [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Successfully updated port: c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.731807] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52481b56-aad5-06e3-2be2-a76704e76bd4, 'name': SearchDatastore_Task, 'duration_secs': 0.052648} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.732860] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.734901] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.734901] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.734901] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.734901] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.734901] env[62914]: DEBUG nova.compute.manager [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received event network-vif-plugged-e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 750.735118] env[62914]: DEBUG oslo_concurrency.lockutils [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] Acquiring lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.735190] env[62914]: DEBUG oslo_concurrency.lockutils [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.735343] env[62914]: DEBUG oslo_concurrency.lockutils [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] Lock 
"bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.735493] env[62914]: DEBUG nova.compute.manager [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] No waiting events found dispatching network-vif-plugged-e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 750.736128] env[62914]: WARNING nova.compute.manager [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received unexpected event network-vif-plugged-e8918472-71f8-4ab8-ae0e-d5333ff21e08 for instance with vm_state building and task_state spawning. [ 750.736128] env[62914]: DEBUG nova.compute.manager [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received event network-changed-e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 750.736307] env[62914]: DEBUG nova.compute.manager [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Refreshing instance network info cache due to event network-changed-e8918472-71f8-4ab8-ae0e-d5333ff21e08. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 750.736473] env[62914]: DEBUG oslo_concurrency.lockutils [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] Acquiring lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.736617] env[62914]: DEBUG oslo_concurrency.lockutils [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] Acquired lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.736781] env[62914]: DEBUG nova.network.neutron [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Refreshing network info cache for port e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 750.743367] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5f0d721-5168-4d3b-ae6d-68c4c9da549f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.769855] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.770348] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 
tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 750.771536] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2165633-f0a0-49fe-b164-de58c1e2895a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.780335] env[62914]: DEBUG nova.compute.utils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 750.783750] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 750.783750] env[62914]: DEBUG nova.network.neutron [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 750.785721] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 750.785721] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525df4ff-a9d4-e918-1eb5-49ad0bfa60ab" [ 750.785721] env[62914]: _type = "Task" [ 750.785721] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.797666] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525df4ff-a9d4-e918-1eb5-49ad0bfa60ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.928894] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831730, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.996832] env[62914]: DEBUG nova.policy [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '021e6445fa3144688f871a39e8317de0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23ba9ece80a24353ac072b643cb16df7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 751.205958] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.206225] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.206279] env[62914]: DEBUG nova.network.neutron [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 751.265857] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Successfully updated port: 04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 751.287221] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 751.307793] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525df4ff-a9d4-e918-1eb5-49ad0bfa60ab, 'name': SearchDatastore_Task, 'duration_secs': 0.070228} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.308774] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff755a5d-87dd-4122-8eac-790ba701d3fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.317702] env[62914]: DEBUG nova.network.neutron [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.325940] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 751.325940] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ee479f-998f-1658-e016-85e18f64a167" [ 751.325940] env[62914]: _type = "Task" [ 751.325940] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.339488] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ee479f-998f-1658-e016-85e18f64a167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.429155] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831730, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.526424] env[62914]: DEBUG nova.network.neutron [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.582665] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.582951] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.773859] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.798116] env[62914]: DEBUG nova.network.neutron [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.852651] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ee479f-998f-1658-e016-85e18f64a167, 'name': SearchDatastore_Task, 'duration_secs': 0.023625} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.852651] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.853960] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 76dfbf82-0ed0-4621-890c-060b187b47e0/76dfbf82-0ed0-4621-890c-060b187b47e0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 751.853960] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b56744f5-48ed-4b58-ad2f-9f740ecfa1a1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.865486] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 751.865486] env[62914]: value = "task-4831731" [ 751.865486] env[62914]: _type = "Task" [ 751.865486] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.893908] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.906889] env[62914]: DEBUG nova.compute.manager [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Received event network-vif-deleted-6faadfd4-7bac-4fbc-a32f-f2742bdff1f6 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 751.906889] env[62914]: DEBUG nova.compute.manager [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Received event network-vif-plugged-29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 751.906889] env[62914]: DEBUG oslo_concurrency.lockutils [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] Acquiring lock "76dfbf82-0ed0-4621-890c-060b187b47e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.907389] env[62914]: DEBUG oslo_concurrency.lockutils [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.907701] env[62914]: DEBUG oslo_concurrency.lockutils [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.907922] env[62914]: DEBUG nova.compute.manager [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] No waiting events found dispatching network-vif-plugged-29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 751.908129] env[62914]: WARNING nova.compute.manager [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Received unexpected event network-vif-plugged-29704154-556c-4ee1-a5d2-fafcd0ac6017 for instance with vm_state building and task_state spawning. [ 751.908319] env[62914]: DEBUG nova.compute.manager [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Received event network-changed-29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 751.908522] env[62914]: DEBUG nova.compute.manager [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Refreshing instance network info cache due to event network-changed-29704154-556c-4ee1-a5d2-fafcd0ac6017. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 751.908746] env[62914]: DEBUG oslo_concurrency.lockutils [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] Acquiring lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.908885] env[62914]: DEBUG oslo_concurrency.lockutils [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] Acquired lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.909319] env[62914]: DEBUG nova.network.neutron [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Refreshing network info cache for port 29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 751.936054] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831730, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.525361} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.936054] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 751.936054] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.936447] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce660d4f-dd80-4723-b65c-520cf112b793 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.952300] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 751.952300] env[62914]: value = "task-4831732" [ 751.952300] env[62914]: _type = "Task" [ 751.952300] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.970460] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831732, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.039655] env[62914]: DEBUG oslo_concurrency.lockutils [req-57ddf483-22ed-474f-b985-8cc5e0d762b2 req-1b2651fc-a3c0-42e0-9bf3-d3019de7c4c4 service nova] Releasing lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.041011] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.041011] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 752.095719] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ae370e-7a45-4661-9e52-8377afd62cd0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.102121] env[62914]: DEBUG nova.network.neutron [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Successfully created port: 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.107542] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ef4502-412a-4e34-9ff7-ab19b4e3d8c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.150423] env[62914]: INFO nova.compute.manager [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Rebuilding instance [ 752.155098] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d812c8-3458-43b4-ba20-9f029eb15963 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.166707] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ea16e2-dcc9-47d2-86c7-90ff23fff19d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.184209] env[62914]: DEBUG nova.compute.provider_tree [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.192607] env[62914]: DEBUG nova.network.neutron [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updating 
instance_info_cache with network_info: [{"id": "c68776d2-73ad-4ec2-b114-31f5878098d8", "address": "fa:16:3e:20:21:c8", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68776d2-73", "ovs_interfaceid": "c68776d2-73ad-4ec2-b114-31f5878098d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.225995] env[62914]: DEBUG nova.compute.manager [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 752.227239] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add42bf4-e0a8-4ed3-a46f-c089ca1407dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.304195] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 752.342220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 752.342220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 752.342220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.342220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 752.342220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.342220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 752.345494] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 752.345657] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 752.345847] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 752.346073] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 752.346220] env[62914]: DEBUG nova.virt.hardware [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 752.348484] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57499c03-e95a-4dab-b426-9aa06c714f1e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.362775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a6f310-34a9-424d-b863-3e686d255706 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.390827] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.465107] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831732, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213606} completed successfully. 
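
Annotation: the nova.virt.hardware entries above trace how the 1-vCPU m1.nano flavor is narrowed to a single CPU topology: with flavor and image limits/preferences of 0:0:0, the maximum defaults to 65536 per dimension and the only possible layout for one vCPU is sockets=1, cores=1, threads=1. The following is a minimal standalone sketch of that narrowing step, not Nova's actual code; the function and variable names are illustrative.

```python
# Illustrative sketch (not Nova's real nova.virt.hardware code): enumerate
# (sockets, cores, threads) combinations whose product equals the vCPU count
# and that respect per-dimension maxima, mirroring the "Build topologies for
# 1 vcpu(s)" -> "Possible topologies [1:1:1]" progression in the log above.
from collections import namedtuple
from itertools import product

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, maximum=VirtCPUTopology(65536, 65536, 65536)):
    found = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if (sockets > maximum.sockets or cores > maximum.cores
                or threads > maximum.threads):
            continue
        found.append(VirtCPUTopology(sockets, cores, threads))
    return found

def sort_by_preference(topologies, preferred):
    # Topologies matching the preferred dimensions sort first; with a
    # preference of 0:0:0 (as in the log) the order is left unchanged.
    def score(topo):
        return sum((topo.sockets == preferred.sockets,
                    topo.cores == preferred.cores,
                    topo.threads == preferred.threads))
    return sorted(topologies, key=score, reverse=True)

if __name__ == "__main__":
    prefer = VirtCPUTopology(0, 0, 0)          # "Flavor pref 0:0:0"
    topos = possible_topologies(vcpus=1)       # -> [VirtCPUTopology(1, 1, 1)]
    print(sort_by_preference(topos, prefer))
```

For one vCPU the search space collapses immediately, which is why the log reports exactly one possible and one sorted desired topology.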
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.465107] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.465400] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bdd351-6543-405b-9690-2d0778e01ec3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.496960] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.500144] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9712751f-6aba-4e5a-9cda-55a7d70b955e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.528031] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 752.528031] env[62914]: value = "task-4831733" [ 752.528031] env[62914]: _type = "Task" [ 752.528031] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.539158] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831733, 'name': ReconfigVM_Task} progress is 6%. 
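
Annotation: every "Task: {'id': task-..., 'name': ...} progress is N%" entry comes from the same shape of loop: an asynchronous vCenter task is submitted, then polled until it reports success (logged with duration_secs) or error. Below is a hedged, generic sketch of that poll-until-complete pattern; `get_task_info` is a stand-in callable, not a real oslo.vmware API.

```python
# Hedged sketch of the poll-until-complete pattern behind the
# "Waiting for the task ... progress is N%" entries. `get_task_info` is a
# stand-in for whatever returns the remote task's state; it is not a real
# oslo.vmware call.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5, log=print):
    start = time.monotonic()
    while True:
        info = get_task_info()          # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            log("completed successfully in %.3fs" % (time.monotonic() - start))
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        log("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)

if __name__ == "__main__":
    # Fake task that finishes after three polls.
    states = iter([{"state": "running", "progress": 4},
                   {"state": "running", "progress": 77},
                   {"state": "success", "result": "task-0000001"}])
    print(wait_for_task(lambda: next(states), interval=0))
```

The repeated progress lines for CopyVirtualDisk_Task, ReconfigVM_Task and CreateVM_Task in this section are successive iterations of exactly this kind of loop.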
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.690151] env[62914]: DEBUG nova.scheduler.client.report [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 752.702191] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.702191] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Instance network_info: |[{"id": "c68776d2-73ad-4ec2-b114-31f5878098d8", "address": "fa:16:3e:20:21:c8", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68776d2-73", "ovs_interfaceid": "c68776d2-73ad-4ec2-b114-31f5878098d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 752.702191] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:21:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bf86b133-2b7b-4cab-8f6f-5a0856d34c7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c68776d2-73ad-4ec2-b114-31f5878098d8', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.710258] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 
tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Creating folder: Project (7cda9ee54ad14f479838a54276dac349). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 752.710644] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9abc5a4-742d-440e-86cd-4c0ae4496bba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.727283] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Created folder: Project (7cda9ee54ad14f479838a54276dac349) in parent group-v941773. [ 752.727729] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Creating folder: Instances. Parent ref: group-v941910. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 752.727809] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fcbd8ff2-a0a6-4017-b7c5-4cb5744e7736 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.740584] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 752.741280] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29fa05fb-2678-4670-909c-426147ab7503 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.755124] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 752.755124] env[62914]: value = "task-4831736" [ 752.755124] env[62914]: _type = "Task" [ 752.755124] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.755430] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Created folder: Instances in parent group-v941910. [ 752.755595] env[62914]: DEBUG oslo.service.loopingcall [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
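
Annotation: the "Instance VIF info [...]" entry above shows the per-port data the VMware vmops layer needs before CreateVM_Task: a MAC address, the NSX logical-switch id used as an opaque network reference, and the Neutron port id. The sketch below derives that list from a network_info structure shaped like the one logged; the field names follow the log, but the function itself is illustrative rather than Nova's implementation.

```python
# Illustrative reduction of a Neutron network_info list (as logged in
# "Updating instance_info_cache with network_info: [...]") to the per-VIF
# fields the log shows under "Instance VIF info". Not Nova's code.
def build_vif_info(network_info, vif_model="vmxnet3"):
    vifs = []
    for vif in network_info:
        details = vif.get("details", {})
        vifs.append({
            "network_name": vif["network"]["bridge"],          # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        })
    return vifs

if __name__ == "__main__":
    sample = [{
        "id": "c68776d2-73ad-4ec2-b114-31f5878098d8",
        "address": "fa:16:3e:20:21:c8",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b"},
    }]
    print(build_vif_info(sample))
```

The two-NIC ServersTestMultiNic instance later in this section produces the same structure with two entries, one per port.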
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.756938] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 752.760757] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e153748-fb7e-4d99-9104-f9a35ff2f47f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.777436] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.791888] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831736, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.793621] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.793621] env[62914]: value = "task-4831737" [ 752.793621] env[62914]: _type = "Task" [ 752.793621] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.809413] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.867064] env[62914]: DEBUG nova.network.neutron [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Updated VIF entry in instance network info cache for port 29704154-556c-4ee1-a5d2-fafcd0ac6017. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 752.867470] env[62914]: DEBUG nova.network.neutron [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Updating instance_info_cache with network_info: [{"id": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "address": "fa:16:3e:86:1d:48", "network": {"id": "2ade9d92-db6f-492b-b2fc-c0b736f0a7e5", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1526613118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d3a3c33ee0c43e981a93d51f5779c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29704154-55", "ovs_interfaceid": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.872280] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "cead3557-080d-4956-a957-cac449bb69f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.872645] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "cead3557-080d-4956-a957-cac449bb69f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.873038] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "cead3557-080d-4956-a957-cac449bb69f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.873181] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "cead3557-080d-4956-a957-cac449bb69f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.873453] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a 
tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "cead3557-080d-4956-a957-cac449bb69f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.881680] env[62914]: INFO nova.compute.manager [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Terminating instance [ 752.887971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "refresh_cache-cead3557-080d-4956-a957-cac449bb69f6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.888252] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquired lock "refresh_cache-cead3557-080d-4956-a957-cac449bb69f6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.888474] env[62914]: DEBUG nova.network.neutron [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 752.894994] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.042231] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831733, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.198805] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.927s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.201515] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 753.204632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.146s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.204822] env[62914]: DEBUG nova.objects.instance [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 753.269199] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831736, 'name': PowerOffVM_Task, 'duration_secs': 0.299272} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.271618] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 753.271618] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 753.271618] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b099189-285a-4259-ba21-69fe459af23d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.280551] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 753.280923] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bd60c99-0d0b-49ac-a3fc-586bc33afb0c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.306320] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task} progress is 15%. 
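
Annotation: the lockutils lines bracket every critical section with how long the caller waited for the lock and how long it was held (for example, "compute_resources" acquired only after waiting 22.146s while another claim held it). A small context-manager sketch of that waited/held accounting around a plain threading.Lock follows; it illustrates the logging pattern only and deliberately omits the fairness and external file-lock features of oslo_concurrency.

```python
# Sketch of the waited/held bookkeeping visible in the lockutils log lines.
# Uses a plain threading.Lock; oslo_concurrency adds fair and external locks,
# which this deliberately omits.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, caller, log=print):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    log('Acquiring lock "%s" by "%s"' % (name, caller))
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    log('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
    held_start = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - held_start
        lock.release()
        log('Lock "%s" released by "%s" :: held %.3fs' % (name, caller, held))

if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)   # the critical section
```

Long "waited" values in these lines are the usual first clue that a single lock such as compute_resources is serializing many concurrent builds.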
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.314898] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 753.315239] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 753.315453] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Deleting the datastore file [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.315785] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f7055bf-3326-44da-a0a5-48c6c182ac0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.324958] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 753.324958] env[62914]: value = "task-4831739" [ 753.324958] env[62914]: _type = "Task" [ 753.324958] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.347784] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831739, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.371019] env[62914]: DEBUG oslo_concurrency.lockutils [req-570b1092-5112-406f-ab02-9168ce123ab8 req-d365a99e-d9f4-46cf-b407-09e92d44c227 service nova] Releasing lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.389281] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.419111] env[62914]: DEBUG nova.network.neutron [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 753.485888] env[62914]: DEBUG nova.network.neutron [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.539206] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831733, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.576764] env[62914]: DEBUG nova.network.neutron [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Updating instance_info_cache with network_info: [{"id": "e8918472-71f8-4ab8-ae0e-d5333ff21e08", "address": "fa:16:3e:ae:6f:a8", "network": {"id": "859729e6-4f73-427e-af96-b427481b7a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1677619083", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8918472-71", "ovs_interfaceid": "e8918472-71f8-4ab8-ae0e-d5333ff21e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "04bf9072-5af6-410e-bc35-bcd17631d744", "address": "fa:16:3e:8f:3c:93", "network": {"id": "dd9152e7-5ed0-43e1-801e-a661156063bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-369016139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04bf9072-5a", "ovs_interfaceid": "04bf9072-5af6-410e-bc35-bcd17631d744", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 753.709545] env[62914]: DEBUG nova.compute.utils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 753.716623] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 753.716623] env[62914]: DEBUG nova.network.neutron [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 753.806023] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task} progress is 15%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.832340] env[62914]: DEBUG nova.policy [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddc9958565c745e488dc7f3b34af9585', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4860bec4a28e4289b7a508f007fff452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 753.839913] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275661} completed successfully. 
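
Annotation: the "Policy check for network:attach_external_network failed with credentials {...}" entry above is a role-based authorization decision; the request carries only the member and reader roles, so the check fails, presumably because the rule requires a role the request does not have, and that capability is simply not granted for this boot. The toy check below is shaped like that decision; the rule table is invented for illustration, whereas real deployments evaluate oslo.policy rules loaded from policy files.

```python
# Toy illustration of a role-based policy check against a credentials dict
# shaped like the one in the log. The rule-to-roles table is an assumption
# made for the example, not the deployment's actual policy.
RULES = {
    "network:attach_external_network": {"admin"},   # assumed requirement
}

def check(rule, credentials):
    required = RULES.get(rule)
    if required is None:
        return True                      # no rule registered in this toy: allow
    return bool(required & set(credentials.get("roles", [])))

if __name__ == "__main__":
    creds = {"user_id": "ddc9958565c745e488dc7f3b34af9585",
             "project_id": "4860bec4a28e4289b7a508f007fff452",
             "roles": ["member", "reader"]}
    print(check("network:attach_external_network", creds))   # False, as in the log
```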
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.842504] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.842830] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 753.843057] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 753.848193] env[62914]: DEBUG nova.compute.manager [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Stashing vm_state: stopped {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 753.852655] env[62914]: INFO nova.compute.manager [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Unrescuing [ 753.852932] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.854519] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquired lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.854519] env[62914]: DEBUG nova.network.neutron [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 753.891503] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.988697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Releasing lock "refresh_cache-cead3557-080d-4956-a957-cac449bb69f6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.989708] env[62914]: DEBUG nova.compute.manager [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 753.989941] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 753.991249] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fbf9ae-65db-4e43-ad8f-42eb7c95003e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.007426] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 754.007787] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df5c915b-36c8-4e7c-9cad-753853eb2d89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.016031] env[62914]: DEBUG oslo_vmware.api [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 754.016031] env[62914]: value = "task-4831740" [ 754.016031] env[62914]: _type = "Task" [ 754.016031] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.025732] env[62914]: DEBUG oslo_vmware.api [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.039700] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831733, 'name': ReconfigVM_Task, 'duration_secs': 1.052177} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.042647] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158/5a704020-921e-4ede-9fd9-b745c027a158.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.042647] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49184641-4d30-4d6c-b1f3-c2b3e20286db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.048723] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 754.048723] env[62914]: value = "task-4831741" [ 754.048723] env[62914]: _type = "Task" [ 754.048723] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.062277] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831741, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.080624] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Releasing lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.080624] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Instance network_info: |[{"id": "e8918472-71f8-4ab8-ae0e-d5333ff21e08", "address": "fa:16:3e:ae:6f:a8", "network": {"id": "859729e6-4f73-427e-af96-b427481b7a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1677619083", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8918472-71", "ovs_interfaceid": "e8918472-71f8-4ab8-ae0e-d5333ff21e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"04bf9072-5af6-410e-bc35-bcd17631d744", "address": "fa:16:3e:8f:3c:93", "network": {"id": "dd9152e7-5ed0-43e1-801e-a661156063bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-369016139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04bf9072-5a", "ovs_interfaceid": "04bf9072-5af6-410e-bc35-bcd17631d744", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 754.081306] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:6f:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1ce8361b-fd8e-4971-a37f-b84a4f77db19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8918472-71f8-4ab8-ae0e-d5333ff21e08', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:3c:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94926d5b-bfab-4c04-85b5-0fe89934c8ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04bf9072-5af6-410e-bc35-bcd17631d744', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 754.091436] env[62914]: DEBUG oslo.service.loopingcall [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.091730] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 754.092124] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f0d6c15-5762-443d-8c46-b400c9f9d825 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.122086] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 754.122086] env[62914]: value = "task-4831742" [ 754.122086] env[62914]: _type = "Task" [ 754.122086] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.133704] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831742, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.218557] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 754.224650] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7abad9cd-fc27-432a-a1e9-e2cdbbcd1368 tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.225490] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.113s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.225722] env[62914]: DEBUG nova.objects.instance [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lazy-loading 'resources' on Instance uuid bf2e9634-66ee-4b6a-a148-bc77420d793f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 754.308549] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task} progress is 15%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.383923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.390824] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.528576] env[62914]: DEBUG oslo_vmware.api [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831740, 'name': PowerOffVM_Task, 'duration_secs': 0.210743} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.528865] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 754.529046] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 754.529324] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e80cf7d-66c8-46cd-a652-9b7813e67fe8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.559712] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 754.559712] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 754.560027] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Deleting the datastore file [datastore2] cead3557-080d-4956-a957-cac449bb69f6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.565479] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf1cbcbc-26e9-429f-aa0f-8e55ad55ec74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.567981] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831741, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.575888] env[62914]: DEBUG oslo_vmware.api [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for the task: (returnval){ [ 754.575888] env[62914]: value = "task-4831744" [ 754.575888] env[62914]: _type = "Task" [ 754.575888] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.588249] env[62914]: DEBUG oslo_vmware.api [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.636294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.636695] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.636901] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831742, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.677875] env[62914]: DEBUG nova.network.neutron [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Updating instance_info_cache with network_info: [{"id": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "address": "fa:16:3e:9d:cf:6f", "network": {"id": "0c435810-7184-4dcd-bd3f-21e08f5747b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-983930408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "df7ae349aea0487d88689eb09933eb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c2416f-fd", "ovs_interfaceid": "f2c2416f-fd5d-479b-b87b-5c00e77e23d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.815026] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.854745] env[62914]: DEBUG nova.network.neutron [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Successfully created port: 392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.890016] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.900638] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 754.900949] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 754.901129] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.901330] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 754.901520] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.901655] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 754.901963] env[62914]: DEBUG nova.virt.hardware [None 
req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 754.904912] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 754.905038] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 754.905227] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 754.905351] env[62914]: DEBUG nova.virt.hardware [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.906626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccaa4dd-fcb3-4d09-8ebd-5a12246cd382 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.916690] env[62914]: DEBUG nova.network.neutron [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Successfully updated port: 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 754.926992] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d308f77-065a-45f6-a256-6dc7e4070ada {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.954166] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 754.959989] env[62914]: DEBUG oslo.service.loopingcall [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.960626] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 754.961521] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-202350ff-40fb-4a9d-842e-517f539cf2bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.983012] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 754.983012] env[62914]: value = "task-4831745" [ 754.983012] env[62914]: _type = "Task" [ 754.983012] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.992936] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831745, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.062385] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Received event network-vif-plugged-c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 755.063091] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Acquiring lock "29a177e4-b5d7-4249-8fc5-2316f6891536-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.063380] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.063606] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.063763] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] No waiting events found dispatching network-vif-plugged-c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 755.063942] env[62914]: WARNING nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Received unexpected event network-vif-plugged-c68776d2-73ad-4ec2-b114-31f5878098d8 for instance with vm_state building and task_state spawning. 
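Editor's note: the _get_desirable_cpu_topologies trace above (flavor m1.nano, 1 vCPU, no flavor or image topology constraints) shows unset limits being widened to a 65536 maximum, the vCPU count being factored into candidate sockets:cores:threads layouts, and the single result 1:1:1 being kept. The following is a hypothetical, simplified sketch of that kind of enumeration, written only to illustrate the trace; it is not Nova's actual nova/virt/hardware.py implementation, and the names used here (possible_topologies, sort_by_preference, MAX_DEFAULT) are invented for this sketch.

    # Hypothetical sketch of the enumeration reflected in the trace above.
    # Limits of 0 fall back to a 65536 maximum; a 0:0:0 preference means
    # "no preference". Not Nova's real code.
    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    MAX_DEFAULT = 65536  # assumed default when neither flavor nor image sets a limit

    def possible_topologies(vcpus, maximum):
        """Enumerate sockets*cores*threads factorizations of vcpus within maximum."""
        found = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if (sockets <= maximum.sockets and cores <= maximum.cores
                    and threads <= maximum.threads):
                found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    def sort_by_preference(topologies, preferred):
        """Rank candidates; axes left at 0 in the preference are 'don't care'."""
        def score(topo):
            return sum(1 for axis in ("sockets", "cores", "threads")
                       if getattr(preferred, axis)
                       and getattr(topo, axis) == getattr(preferred, axis))
        return sorted(topologies, key=score, reverse=True)

    maximum = VirtCPUTopology(MAX_DEFAULT, MAX_DEFAULT, MAX_DEFAULT)
    preferred = VirtCPUTopology(0, 0, 0)          # 0:0:0 == no preference
    candidates = possible_topologies(1, maximum)  # -> [VirtCPUTopology(1, 1, 1)]
    print(sort_by_preference(candidates, preferred))

For the 1-vCPU flavor in the log, the only factorization within the limits is 1:1:1, which matches the "Got 1 possible topologies" and "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries above.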
[ 755.064139] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received event network-vif-plugged-04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 755.064313] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Acquiring lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.064508] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.064679] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.064866] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] No waiting events found dispatching network-vif-plugged-04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 755.065042] env[62914]: WARNING nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received unexpected event network-vif-plugged-04bf9072-5af6-410e-bc35-bcd17631d744 for instance with vm_state building and task_state spawning. [ 755.065232] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Received event network-changed-c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 755.065740] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Refreshing instance network info cache due to event network-changed-c68776d2-73ad-4ec2-b114-31f5878098d8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 755.065740] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Acquiring lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.066043] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Acquired lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.066261] env[62914]: DEBUG nova.network.neutron [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Refreshing network info cache for port c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 755.072776] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831741, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.089578] env[62914]: DEBUG oslo_vmware.api [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Task: {'id': task-4831744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112163} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.090508] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.091431] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 755.091431] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 755.091431] env[62914]: INFO nova.compute.manager [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] [instance: cead3557-080d-4956-a957-cac449bb69f6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 755.091651] env[62914]: DEBUG oslo.service.loopingcall [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.092300] env[62914]: DEBUG nova.compute.manager [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 755.092412] env[62914]: DEBUG nova.network.neutron [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 755.139389] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831742, 'name': CreateVM_Task, 'duration_secs': 0.623508} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.140153] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 755.141088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.141826] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.141826] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 755.141968] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd7ae476-1414-48fd-9fb5-303c25a26c70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.149652] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 755.149652] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ca4b95-ab79-fde7-d726-39d492f69279" [ 755.149652] env[62914]: _type = "Task" [ 755.149652] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.153958] env[62914]: DEBUG nova.network.neutron [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.163994] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ca4b95-ab79-fde7-d726-39d492f69279, 'name': SearchDatastore_Task, 'duration_secs': 0.01126} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.167729] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.168053] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.168555] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.168743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.168950] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.169848] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed218dd4-a72e-4b1d-bf4f-b160bdfe1cdd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.180357] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.180577] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 755.185208] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051ba796-c750-4926-976c-aac49d6621a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.188222] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Releasing lock "refresh_cache-8b83f82b-42f7-4f33-abc4-ff278d343309" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.189126] env[62914]: DEBUG nova.objects.instance [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lazy-loading 'flavor' on Instance uuid 8b83f82b-42f7-4f33-abc4-ff278d343309 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.195227] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 755.195227] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52623df3-82ce-692c-c29f-91dd1027119a" [ 755.195227] env[62914]: _type = "Task" [ 755.195227] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.207058] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52623df3-82ce-692c-c29f-91dd1027119a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.235950] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 755.273077] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.273428] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.273579] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.273819] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.273976] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.274424] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.274508] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.274640] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.274821] env[62914]: DEBUG nova.virt.hardware [None 
req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.274985] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.275293] env[62914]: DEBUG nova.virt.hardware [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.277052] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d7689e-1465-45e1-a8b0-84cc5d38522d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.289197] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265a39f7-ab75-41cb-8756-2e6448425a94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.316719] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.397548] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831731, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.183751} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.397664] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 76dfbf82-0ed0-4621-890c-060b187b47e0/76dfbf82-0ed0-4621-890c-060b187b47e0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 755.397786] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.398061] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-150cef4f-21af-4b55-bec3-86bec918e289 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.406322] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 755.406322] env[62914]: value = "task-4831746" [ 755.406322] env[62914]: _type = "Task" [ 755.406322] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.413048] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cefbcb-8643-4934-83ce-67ec3e50e47b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.423617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.423617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.423801] env[62914]: DEBUG nova.network.neutron [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 755.425949] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831746, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.430336] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6575e2b-f51c-4f65-a407-205d679cfb1e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.472342] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecdd1a2-33fb-4628-b678-039049fdda1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.479609] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1ed8c2-bc49-48dc-882f-4657b8113f54 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.511193] env[62914]: DEBUG nova.compute.provider_tree [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.512477] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831745, 'name': CreateVM_Task, 'duration_secs': 0.287296} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.512940] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 755.513390] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.513586] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.514124] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 755.514451] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5470e7f-83ca-4c2c-8179-43244c64badd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.521214] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 755.521214] env[62914]: value = 
"session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b036c2-30f2-c176-32fe-cdab930c1306" [ 755.521214] env[62914]: _type = "Task" [ 755.521214] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.532050] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b036c2-30f2-c176-32fe-cdab930c1306, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.563318] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831741, 'name': Rename_Task, 'duration_secs': 1.174189} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.563510] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 755.563786] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66cd4bd8-933e-4d8a-8118-c7ce767d5157 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.572183] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 755.572183] env[62914]: value = "task-4831747" [ 755.572183] env[62914]: _type = "Task" [ 755.572183] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.587447] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831747, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.656964] env[62914]: DEBUG nova.network.neutron [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.698245] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cee9663-2323-46fa-8de5-ec3b75d0cdf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.715036] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52623df3-82ce-692c-c29f-91dd1027119a, 'name': SearchDatastore_Task, 'duration_secs': 0.011248} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.733458] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 755.733458] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9715054c-25d8-4f6a-87a6-b9df7b07ca4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.735298] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c0a51fc-5e54-4bdb-853a-d5befed2279d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.741222] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 755.741222] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52937013-49bd-354f-77bc-38df948f58c8" [ 755.741222] env[62914]: _type = "Task" [ 755.741222] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.746282] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 755.746282] env[62914]: value = "task-4831748" [ 755.746282] env[62914]: _type = "Task" [ 755.746282] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.752931] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52937013-49bd-354f-77bc-38df948f58c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.759362] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831748, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.818643] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831737, 'name': CreateVM_Task, 'duration_secs': 2.715035} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.818898] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 755.819777] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.819977] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.820349] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 755.820667] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d223b00f-15b2-4426-8393-3d5eb06ce92c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.829175] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 755.829175] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522420f9-b4ea-3108-1269-6e2e1bcb6bf5" [ 755.829175] env[62914]: _type = "Task" [ 755.829175] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.836536] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522420f9-b4ea-3108-1269-6e2e1bcb6bf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.917302] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119033} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.917856] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.918827] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b838973-aa77-4092-9cac-f46b2c5d2605 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.948982] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 76dfbf82-0ed0-4621-890c-060b187b47e0/76dfbf82-0ed0-4621-890c-060b187b47e0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.951632] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8bf53fb-7b14-411e-92c3-9600e9f40ef4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.974639] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 755.974639] env[62914]: value = "task-4831749" [ 755.974639] env[62914]: _type = "Task" [ 755.974639] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.984735] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831749, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.011751] env[62914]: DEBUG nova.network.neutron [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.015105] env[62914]: DEBUG nova.scheduler.client.report [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.035096] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b036c2-30f2-c176-32fe-cdab930c1306, 'name': SearchDatastore_Task, 'duration_secs': 0.010108} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.035580] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.035903] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.036394] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.085848] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831747, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.090683] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "10102941-c31a-4ab1-be5a-801520d49fd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.091075] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "10102941-c31a-4ab1-be5a-801520d49fd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.160602] env[62914]: INFO nova.compute.manager [-] [instance: cead3557-080d-4956-a957-cac449bb69f6] Took 1.07 seconds to deallocate network for instance. [ 756.259622] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52937013-49bd-354f-77bc-38df948f58c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011767} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.264288] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.266022] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bc6da94e-4de8-4e56-a071-d04c5e5dad18/bc6da94e-4de8-4e56-a071-d04c5e5dad18.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 756.266022] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831748, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.266022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.266022] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.266022] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-babf50b3-c3a7-4f4f-994b-82191efd15b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.268623] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdfe3414-034d-453c-96d0-d628fde6212e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.277461] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 756.277461] env[62914]: value = "task-4831750" [ 756.277461] env[62914]: _type = "Task" [ 756.277461] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.283326] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.283562] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 756.287741] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d93bb9-1b47-4b97-95ba-890dfd9b9313 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.290841] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831750, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.291971] env[62914]: DEBUG nova.network.neutron [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.294133] env[62914]: DEBUG nova.network.neutron [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updated VIF entry in instance network info cache for port c68776d2-73ad-4ec2-b114-31f5878098d8. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 756.294504] env[62914]: DEBUG nova.network.neutron [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updating instance_info_cache with network_info: [{"id": "c68776d2-73ad-4ec2-b114-31f5878098d8", "address": "fa:16:3e:20:21:c8", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68776d2-73", "ovs_interfaceid": "c68776d2-73ad-4ec2-b114-31f5878098d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.304036] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 756.304036] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520da9fa-9e39-18e4-5d99-8adfb42250cb" [ 756.304036] env[62914]: _type = "Task" [ 756.304036] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.313706] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520da9fa-9e39-18e4-5d99-8adfb42250cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.339624] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522420f9-b4ea-3108-1269-6e2e1bcb6bf5, 'name': SearchDatastore_Task, 'duration_secs': 0.020501} completed successfully. 
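The instance_info_cache update just logged carries the full network_info structure for port c68776d2 as a list of VIF dicts. The small helper below, purely for illustration, pulls out the fields most often needed when reading such entries (port id, MAC, fixed IPs, devname, NSX segmentation id); it assumes the list has already been deserialized from the cache.

```python
# Illustration only: summarize a network_info list of VIF dicts like the
# one in the cache update above. Assumes the JSON has already been
# parsed into Python objects.
def summarize_vifs(network_info):
    rows = []
    for vif in network_info:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        rows.append({
            'port_id': vif['id'],
            'mac': vif['address'],
            'ips': ips,
            'devname': vif.get('devname'),
            'segmentation_id': vif['details'].get('segmentation_id'),
        })
    return rows

# e.g. summarize_vifs(cache_entry) ->
# [{'port_id': 'c68776d2-...', 'mac': 'fa:16:3e:20:21:c8',
#   'ips': ['192.168.128.14'], 'devname': 'tapc68776d2-73',
#   'segmentation_id': 557}]
```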
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.340385] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.340964] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.341628] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.341801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.342137] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.342481] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d4098be-a0d4-4fec-b103-7775bf0571a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.367492] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.367834] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 756.369048] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce88201b-33c1-4917-a408-ea4518815fe9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.376818] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 756.376818] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205429c-150c-1161-c0e3-0ec7c412cd2c" [ 756.376818] env[62914]: _type = "Task" [ 756.376818] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.388437] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205429c-150c-1161-c0e3-0ec7c412cd2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.487934] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831749, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.523894] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.529418] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.432s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.532800] env[62914]: INFO nova.compute.claims [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.566352] env[62914]: INFO nova.scheduler.client.report [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Deleted allocations for instance bf2e9634-66ee-4b6a-a148-bc77420d793f [ 756.592124] env[62914]: DEBUG oslo_vmware.api [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831747, 'name': PowerOnVM_Task, 'duration_secs': 0.70291} completed successfully. 
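Task task-4831747 (PowerOnVM_Task) has just completed; the records that follow show the driver confirming the result by checking the VM's power state (_get_power_state) with a PropertyCollector.RetrievePropertiesEx round trip. A rough sketch of that kind of property read through oslo.vmware is shown below; `session` and `vm_ref` are assumed to already exist, and the exact call path inside Nova may differ.

```python
# Rough sketch, not Nova's exact code: read a VM's power state through
# oslo.vmware. This issues the PropertyCollector.RetrievePropertiesEx
# request visible in the log. 'session' is an existing VMwareAPISession
# and 'vm_ref' a VirtualMachine managed-object reference (assumptions).
from oslo_vmware import vim_util

def get_power_state(session, vm_ref):
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')
```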
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.592793] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 756.593189] env[62914]: DEBUG nova.compute.manager [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 756.594183] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133e7bab-7a96-461d-897f-29eec76629e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.674345] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.700033] env[62914]: DEBUG nova.compute.manager [req-587d1df3-eeba-4f47-9a9e-2368f1dc0fb4 req-bae38c73-ff89-438a-a321-51dd32c9b00a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-vif-plugged-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 756.700256] env[62914]: DEBUG oslo_concurrency.lockutils [req-587d1df3-eeba-4f47-9a9e-2368f1dc0fb4 req-bae38c73-ff89-438a-a321-51dd32c9b00a service nova] Acquiring lock "7d8287f9-10be-4834-8b7a-1b764145d1c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.700504] env[62914]: DEBUG oslo_concurrency.lockutils [req-587d1df3-eeba-4f47-9a9e-2368f1dc0fb4 req-bae38c73-ff89-438a-a321-51dd32c9b00a service nova] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.700709] env[62914]: DEBUG oslo_concurrency.lockutils [req-587d1df3-eeba-4f47-9a9e-2368f1dc0fb4 req-bae38c73-ff89-438a-a321-51dd32c9b00a service nova] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.700848] env[62914]: DEBUG nova.compute.manager [req-587d1df3-eeba-4f47-9a9e-2368f1dc0fb4 req-bae38c73-ff89-438a-a321-51dd32c9b00a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] No waiting events found dispatching network-vif-plugged-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 756.701278] env[62914]: WARNING nova.compute.manager [req-587d1df3-eeba-4f47-9a9e-2368f1dc0fb4 
req-bae38c73-ff89-438a-a321-51dd32c9b00a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received unexpected event network-vif-plugged-691c01fe-1d59-431c-9474-7726ec537a5b for instance with vm_state building and task_state spawning. [ 756.764032] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831748, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.791180] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831750, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.797014] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.797357] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Instance network_info: |[{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 756.797860] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:2c:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '691c01fe-1d59-431c-9474-7726ec537a5b', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.806489] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Creating folder: Project (23ba9ece80a24353ac072b643cb16df7). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 756.808174] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Releasing lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.808539] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received event network-changed-04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 756.808809] env[62914]: DEBUG nova.compute.manager [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Refreshing instance network info cache due to event network-changed-04bf9072-5af6-410e-bc35-bcd17631d744. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 756.809116] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Acquiring lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.809303] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Acquired lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.809486] env[62914]: DEBUG nova.network.neutron [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Refreshing network info cache for port 04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 756.811656] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-147a93c2-17b2-4c1b-ae92-e53486a94d96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.825888] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520da9fa-9e39-18e4-5d99-8adfb42250cb, 'name': SearchDatastore_Task, 'duration_secs': 0.011381} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.829182] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f418f22c-7c56-4768-8850-23501a7b1a0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.832341] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Created folder: Project (23ba9ece80a24353ac072b643cb16df7) in parent group-v941773. [ 756.832563] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Creating folder: Instances. Parent ref: group-v941915. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 756.833171] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25035c75-0409-417a-82f5-f39f3f33bf99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.839448] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 756.839448] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dab048-b9a1-8cdc-8f1c-946a3bea562a" [ 756.839448] env[62914]: _type = "Task" [ 756.839448] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.846150] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Created folder: Instances in parent group-v941915. [ 756.846573] env[62914]: DEBUG oslo.service.loopingcall [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.850881] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 756.851474] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dab048-b9a1-8cdc-8f1c-946a3bea562a, 'name': SearchDatastore_Task} progress is 0%. 
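The two Folder.CreateFolder calls above build the project folder under group-v941773 and then its Instances child (group-v941915) before the VM is created. The sketch below shows roughly how such synchronous folder creation looks through oslo.vmware's invoke_api; the helper name, `session`, and `parent_ref` are assumptions, and CreateFolder returns the new folder's managed-object reference directly, with no task to wait on.

```python
# Hedged sketch of the two CreateFolder calls logged above; not the
# actual nova.virt.vmwareapi.vm_util implementation. 'session' and
# 'parent_ref' are assumed; CreateFolder is synchronous and returns
# the new Folder moref.
def create_instance_folders(session, parent_ref, project_id):
    project_folder = session.invoke_api(session.vim, 'CreateFolder',
                                        parent_ref,
                                        name='Project (%s)' % project_id)
    return session.invoke_api(session.vim, 'CreateFolder',
                              project_folder, name='Instances')
```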
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.851474] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e720cc99-b7d5-468e-a595-42305954a0ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.872623] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.872623] env[62914]: value = "task-4831753" [ 756.872623] env[62914]: _type = "Task" [ 756.872623] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.885026] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831753, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.891780] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205429c-150c-1161-c0e3-0ec7c412cd2c, 'name': SearchDatastore_Task, 'duration_secs': 0.017818} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.892641] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75458fcc-ca55-4954-a0b1-2b57e0b507cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.898616] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 756.898616] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e90dd8-7b89-0e10-72ec-ff9d1870cc65" [ 756.898616] env[62914]: _type = "Task" [ 756.898616] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.907584] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e90dd8-7b89-0e10-72ec-ff9d1870cc65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.986472] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831749, 'name': ReconfigVM_Task, 'duration_secs': 0.600807} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.986921] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 76dfbf82-0ed0-4621-890c-060b187b47e0/76dfbf82-0ed0-4621-890c-060b187b47e0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.987569] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8625ca00-db65-44e2-a7de-f5b6be06872d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.994910] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 756.994910] env[62914]: value = "task-4831754" [ 756.994910] env[62914]: _type = "Task" [ 756.994910] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.003822] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831754, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.086965] env[62914]: DEBUG oslo_concurrency.lockutils [None req-220dddfd-b353-4591-afb4-d39639930b25 tempest-ImagesNegativeTestJSON-924451865 tempest-ImagesNegativeTestJSON-924451865-project-member] Lock "bf2e9634-66ee-4b6a-a148-bc77420d793f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.234s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.115105] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.258476] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831748, 'name': PowerOffVM_Task, 'duration_secs': 1.073209} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.259047] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 757.266024] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Reconfiguring VM instance instance-00000028 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 757.266177] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd3c9037-7c2d-4426-9657-b7d4206bedaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.292107] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831750, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573343} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.293925] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bc6da94e-4de8-4e56-a071-d04c5e5dad18/bc6da94e-4de8-4e56-a071-d04c5e5dad18.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 757.294277] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.294742] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 757.294742] env[62914]: value = "task-4831755" [ 757.294742] env[62914]: _type = "Task" [ 757.294742] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.295062] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbd3ea78-e82e-49b4-b70c-7201bb728f31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.311505] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831755, 'name': ReconfigVM_Task} progress is 5%. 
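Task task-4831750 has just copied the cached image VMDK into the instance directory, and the driver immediately extends the root disk to the requested size (1048576 KB here) with ExtendVirtualDisk_Task. An illustrative sketch of that copy-then-extend sequence over the vSphere VirtualDiskManager API follows; the parameter names come from the vSphere SDK, while `session`, `dc_ref`, and the helper itself are assumptions (the real logic lives in nova.virt.vmwareapi.vm_util).

```python
# Illustrative only: copy a cached VMDK to the instance path, then grow
# the root disk, mirroring the CopyVirtualDisk_Task / ExtendVirtualDisk_Task
# pair in the log. 'session' and 'dc_ref' are assumed to exist.
def copy_and_extend(session, dc_ref, src_vmdk, dst_vmdk, new_size_kb):
    vdm = session.vim.service_content.virtualDiskManager

    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=src_vmdk, sourceDatacenter=dc_ref,
        destName=dst_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)   # polled like the tasks above

    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', vdm,
        name=dst_vmdk, datacenter=dc_ref,
        newCapacityKb=new_size_kb, eagerZero=False)
    session.wait_for_task(extend_task)
```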
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.312222] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 757.312222] env[62914]: value = "task-4831756" [ 757.312222] env[62914]: _type = "Task" [ 757.312222] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.323873] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831756, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.355683] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dab048-b9a1-8cdc-8f1c-946a3bea562a, 'name': SearchDatastore_Task, 'duration_secs': 0.014651} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.355683] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.355683] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 757.355683] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38decfcd-12f5-4ee7-be06-561ecb9df599 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.367606] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 757.367606] env[62914]: value = "task-4831757" [ 757.367606] env[62914]: _type = "Task" [ 757.367606] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.387022] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831757, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.396023] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831753, 'name': CreateVM_Task, 'duration_secs': 0.51056} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.396023] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 757.396023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.396023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.396023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 757.396023] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c443ba1e-f677-4237-919a-1bce77637caa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.400557] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 757.400557] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d0bf59-37f1-10be-5271-c693ed68453b" [ 757.400557] env[62914]: _type = "Task" [ 757.400557] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.421379] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d0bf59-37f1-10be-5271-c693ed68453b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.421695] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e90dd8-7b89-0e10-72ec-ff9d1870cc65, 'name': SearchDatastore_Task, 'duration_secs': 0.017692} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.421942] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.422225] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 29a177e4-b5d7-4249-8fc5-2316f6891536/29a177e4-b5d7-4249-8fc5-2316f6891536.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 757.422534] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07f03ae7-43fa-41e2-a25f-9794f62fd5a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.432974] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 757.432974] env[62914]: value = "task-4831758" [ 757.432974] env[62914]: _type = "Task" [ 757.432974] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.442986] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.518228] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831754, 'name': Rename_Task, 'duration_secs': 0.361263} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.521666] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 757.522170] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1359a18a-9c24-4934-992e-2b37013def6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.533728] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 757.533728] env[62914]: value = "task-4831759" [ 757.533728] env[62914]: _type = "Task" [ 757.533728] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.545337] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.789409] env[62914]: DEBUG nova.network.neutron [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Successfully updated port: 392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.816664] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831755, 'name': ReconfigVM_Task, 'duration_secs': 0.461385} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.826163] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Reconfigured VM instance instance-00000028 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 757.826163] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 757.830220] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-768e0fdc-6770-4c17-b5f9-2685e55538eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.841357] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831756, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091392} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.843547] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 757.844188] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 757.844188] env[62914]: value = "task-4831760" [ 757.844188] env[62914]: _type = "Task" [ 757.844188] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.845336] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8115670b-9a56-41a0-b2dc-83bc35c4b634 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.859020] env[62914]: DEBUG nova.network.neutron [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Updated VIF entry in instance network info cache for port 04bf9072-5af6-410e-bc35-bcd17631d744. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 757.859677] env[62914]: DEBUG nova.network.neutron [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Updating instance_info_cache with network_info: [{"id": "e8918472-71f8-4ab8-ae0e-d5333ff21e08", "address": "fa:16:3e:ae:6f:a8", "network": {"id": "859729e6-4f73-427e-af96-b427481b7a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1677619083", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8918472-71", "ovs_interfaceid": "e8918472-71f8-4ab8-ae0e-d5333ff21e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "04bf9072-5af6-410e-bc35-bcd17631d744", "address": "fa:16:3e:8f:3c:93", "network": {"id": "dd9152e7-5ed0-43e1-801e-a661156063bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-369016139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7da23e8d3c044f178c224a3e40a346a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04bf9072-5a", "ovs_interfaceid": "04bf9072-5af6-410e-bc35-bcd17631d744", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.903200] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] bc6da94e-4de8-4e56-a071-d04c5e5dad18/bc6da94e-4de8-4e56-a071-d04c5e5dad18.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.915097] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a28f7201-75b4-4711-a574-2bc3c31a38cd {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.929989] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831760, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.945600] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 757.945600] env[62914]: value = "task-4831761" [ 757.945600] env[62914]: _type = "Task" [ 757.945600] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.945842] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d0bf59-37f1-10be-5271-c693ed68453b, 'name': SearchDatastore_Task, 'duration_secs': 0.02035} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.946248] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55798} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.950504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.950504] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.950807] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.951147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.951442] env[62914]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.951801] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 757.952037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.961705] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51c1261b-8fca-4005-b904-f60c339e048d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.964330] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6cd4d679-e12a-4c2d-ae2e-e4afb2406321 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.967540] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831758, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.976970] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.978630] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 757.978630] env[62914]: value = "task-4831762" [ 757.978630] env[62914]: _type = "Task" [ 757.978630] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.988023] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.988023] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 757.988912] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-468f3aa1-0159-457d-8a4c-4071a95fcad2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.998100] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 757.998100] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52502b59-83c8-337d-0869-71f64ac28736" [ 757.998100] env[62914]: _type = "Task" [ 757.998100] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.002693] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831762, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.018529] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52502b59-83c8-337d-0869-71f64ac28736, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.052125] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831759, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.066019] env[62914]: DEBUG nova.compute.manager [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 758.066019] env[62914]: DEBUG nova.compute.manager [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing instance network info cache due to event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 758.066019] env[62914]: DEBUG oslo_concurrency.lockutils [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] Acquiring lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.066545] env[62914]: DEBUG oslo_concurrency.lockutils [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] Acquired lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.066545] env[62914]: DEBUG nova.network.neutron [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 758.113443] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "4911baea-15df-46db-be11-fcf998eb0cb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.113761] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "4911baea-15df-46db-be11-fcf998eb0cb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.232923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "fed831e0-4518-4025-89b1-7f6b644e013d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.233387] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "fed831e0-4518-4025-89b1-7f6b644e013d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.235255] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "fed831e0-4518-4025-89b1-7f6b644e013d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.235255] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 
tempest-ServersTestManualDisk-1910117612-project-member] Lock "fed831e0-4518-4025-89b1-7f6b644e013d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.235255] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "fed831e0-4518-4025-89b1-7f6b644e013d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.236820] env[62914]: INFO nova.compute.manager [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Terminating instance [ 758.241338] env[62914]: DEBUG nova.compute.manager [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 758.241560] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 758.243154] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bef5c5e-c55d-401d-9e12-ab8fc1863c31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.255062] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 758.255402] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f55769c0-757a-41d2-bd5e-c3646a5662e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.267958] env[62914]: DEBUG oslo_vmware.api [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 758.267958] env[62914]: value = "task-4831763" [ 758.267958] env[62914]: _type = "Task" [ 758.267958] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.285289] env[62914]: DEBUG oslo_vmware.api [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831763, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.292201] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-968cbfbe-1570-48d6-890d-c7a680855574" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.293542] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-968cbfbe-1570-48d6-890d-c7a680855574" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.293542] env[62914]: DEBUG nova.network.neutron [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.353035] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cda2ea1-9cbf-4a32-8c9f-12769ba15e09 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.366493] env[62914]: DEBUG oslo_concurrency.lockutils [req-385b9116-4f0d-4cf3-a519-e8965ba82241 req-2bbaad5d-d14a-4fcd-b6ee-85f3deb2134d service nova] Releasing lock "refresh_cache-bc6da94e-4de8-4e56-a071-d04c5e5dad18" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.367695] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831760, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.370701] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c530946-18ba-4bdb-9c6c-59792d65be6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.413012] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4416807c-91a5-4850-b34e-41e3ca8977de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.426264] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff9c87b-695b-4931-8b3a-50e052e1fcb6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.443024] env[62914]: DEBUG nova.compute.provider_tree [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.457708] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.956213} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.458731] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 29a177e4-b5d7-4249-8fc5-2316f6891536/29a177e4-b5d7-4249-8fc5-2316f6891536.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 758.460304] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.460304] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a103e2de-399b-4271-aaad-079886fb0056 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.466757] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831761, 'name': ReconfigVM_Task, 'duration_secs': 0.447699} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.467572] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Reconfigured VM instance instance-0000002b to attach disk [datastore2] bc6da94e-4de8-4e56-a071-d04c5e5dad18/bc6da94e-4de8-4e56-a071-d04c5e5dad18.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.468330] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90fff442-7bca-4d4d-bbfe-174b1909d3c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.472733] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 758.472733] env[62914]: value = "task-4831764" [ 758.472733] env[62914]: _type = "Task" [ 758.472733] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.477647] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 758.477647] env[62914]: value = "task-4831765" [ 758.477647] env[62914]: _type = "Task" [ 758.477647] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.486283] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.496288] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831765, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.505283] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831762, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090582} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.508298] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.508298] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facf8e30-ca19-43ec-a247-502badf3821d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.518497] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52502b59-83c8-337d-0869-71f64ac28736, 'name': SearchDatastore_Task, 'duration_secs': 0.066394} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.536797] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.537320] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4fd4c6-73ba-414f-9c3d-bf33c8325d39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.540363] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3f18f58-d161-4254-b183-396742a9f603 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.563228] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 758.563228] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ad7ef-f9a1-cf09-c0e8-eb033a54d2de" [ 758.563228] env[62914]: _type = "Task" [ 758.563228] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.567377] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831759, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.572572] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 758.572572] env[62914]: value = "task-4831766" [ 758.572572] env[62914]: _type = "Task" [ 758.572572] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.583057] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ad7ef-f9a1-cf09-c0e8-eb033a54d2de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.592445] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.784176] env[62914]: DEBUG oslo_vmware.api [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831763, 'name': PowerOffVM_Task, 'duration_secs': 0.295068} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.784498] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 758.784695] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 758.784983] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe4ab03d-e7c7-4590-8a5d-647efc318246 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.862372] env[62914]: DEBUG oslo_vmware.api [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831760, 'name': PowerOnVM_Task, 'duration_secs': 0.580353} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.862838] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 758.863053] env[62914]: DEBUG nova.compute.manager [None req-3e8d6375-04f0-4f8a-a427-59f87c7b0016 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 758.863880] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbb5fb7-f357-46cc-bea6-744de0f1141c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.873716] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 758.873961] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 758.874169] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Deleting the datastore file [datastore2] fed831e0-4518-4025-89b1-7f6b644e013d {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.874675] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47b2aca0-48b4-4bf2-b670-a763685f2c11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.881306] env[62914]: DEBUG oslo_vmware.api [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for the task: (returnval){ [ 758.881306] env[62914]: value = "task-4831768" [ 758.881306] env[62914]: _type = "Task" [ 758.881306] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.882223] env[62914]: DEBUG nova.network.neutron [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.892787] env[62914]: DEBUG oslo_vmware.api [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831768, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.950802] env[62914]: DEBUG nova.scheduler.client.report [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 758.986906] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142562} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.987363] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.989092] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b665c43b-0eb3-43e9-8687-5da90cef0796 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.995895] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831765, 'name': Rename_Task, 'duration_secs': 0.254019} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.996738] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 758.997139] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3e6b280-ca09-40e6-af1d-02839bcb4f7e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.021237] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 29a177e4-b5d7-4249-8fc5-2316f6891536/29a177e4-b5d7-4249-8fc5-2316f6891536.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.024186] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acd38d72-cd89-4c9e-bd16-c86bb0eba162 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.045010] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 759.045010] env[62914]: value = "task-4831769" [ 759.045010] env[62914]: _type = "Task" [ 759.045010] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.062019] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 759.062019] env[62914]: value = "task-4831770" [ 759.062019] env[62914]: _type = "Task" [ 759.062019] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.073427] env[62914]: DEBUG oslo_vmware.api [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831759, 'name': PowerOnVM_Task, 'duration_secs': 1.158774} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.073884] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831769, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.077169] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 759.077586] env[62914]: INFO nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Took 14.47 seconds to spawn the instance on the hypervisor. [ 759.077813] env[62914]: DEBUG nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 759.079509] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff8ac22-eae5-4551-91c6-cd23111a73dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.094347] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.102095] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ad7ef-f9a1-cf09-c0e8-eb033a54d2de, 'name': SearchDatastore_Task, 'duration_secs': 0.033096} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.108045] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831766, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.108045] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.108045] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 7d8287f9-10be-4834-8b7a-1b764145d1c3/7d8287f9-10be-4834-8b7a-1b764145d1c3.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 759.108045] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f640e726-82bf-483d-a602-19df8269ac19 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.114941] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 759.114941] env[62914]: value = "task-4831771" [ 759.114941] env[62914]: _type = "Task" [ 759.114941] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.124153] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831771, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.206522] env[62914]: DEBUG nova.network.neutron [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updated VIF entry in instance network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 759.206896] env[62914]: DEBUG nova.network.neutron [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.269170] env[62914]: DEBUG nova.network.neutron [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Updating instance_info_cache with network_info: [{"id": "392f2779-6bcf-4d28-9f9b-bd4279812dc7", "address": "fa:16:3e:9c:3b:b9", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap392f2779-6b", "ovs_interfaceid": "392f2779-6bcf-4d28-9f9b-bd4279812dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.395389] env[62914]: DEBUG oslo_vmware.api [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Task: {'id': task-4831768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144299} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.396578] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 759.396874] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 759.397356] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 759.397572] env[62914]: INFO nova.compute.manager [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 759.397831] env[62914]: DEBUG oslo.service.loopingcall [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.398322] env[62914]: DEBUG nova.compute.manager [-] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 759.398422] env[62914]: DEBUG nova.network.neutron [-] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.463426] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.934s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.464028] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 759.469315] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.887s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.471858] env[62914]: INFO nova.compute.claims [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.576832] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.580645] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831769, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.591417] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831766, 'name': ReconfigVM_Task, 'duration_secs': 0.74306} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.594139] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Reconfigured VM instance instance-0000002a to attach disk [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6/e6544702-bde7-4056-8a50-adede5c6a9d6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.595324] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e670d9cd-fd62-4ba8-8d06-bfda4b842422 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.605278] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 759.605278] env[62914]: value = "task-4831772" [ 759.605278] env[62914]: _type = "Task" [ 759.605278] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.626797] env[62914]: INFO nova.compute.manager [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Took 42.40 seconds to build instance. 
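The entries above and below follow the recurring oslo.vmware pattern in this log: a vim task method such as ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task or PowerOnVM_Task is invoked ("Invoking VirtualMachine.<X>_Task with opID=oslo.vmware-..."), the session then polls it ("Task: {...} progress is N%") until it finishes ("completed successfully", with a 'duration_secs' value). The block below is a minimal sketch of that public invoke-then-poll interface, assuming oslo.vmware's VMwareAPISession; the host, credentials and the "vm-12345" managed-object value are placeholders, and this is not Nova's actual driver code, which drives the same calls from inside nova.virt.vmwareapi.

    # Minimal sketch of the oslo.vmware invoke-then-poll pattern.
    # Host, credentials and the "vm-12345" moref value are placeholders.
    from oslo_vmware import api, vim_util

    # Establish a vCenter API session.
    session = api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder user
        'secret',                         # placeholder password
        api_retry_count=10,               # retry transient API faults
        task_poll_interval=0.5)           # seconds between task polls

    # Build a managed-object reference for an existing VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task ..."
    # DEBUG line) and returns a task moref immediately, without waiting for the result.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task (the "progress is N%" lines) and returns the
    # task info once it reaches the 'success' state, raising on error or cancellation.
    task_info = session.wait_for_task(task)
    # At this point the task has succeeded; its result, if any, is on task_info.

The same wait_for_task helper is what produces the "Waiting for the task: (returnval){...}" and "completed successfully ... 'duration_secs'" entries seen throughout this section.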
[ 759.628797] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831772, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.637187] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831771, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.710269] env[62914]: DEBUG oslo_concurrency.lockutils [req-e45e721a-1ab2-4bdf-a2d0-c0309ec37fc0 req-fe08f57e-04e5-44f2-9ff9-ab8596dc6e80 service nova] Releasing lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.719642] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.719924] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.774355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-968cbfbe-1570-48d6-890d-c7a680855574" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.775425] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Instance network_info: |[{"id": "392f2779-6bcf-4d28-9f9b-bd4279812dc7", "address": "fa:16:3e:9c:3b:b9", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap392f2779-6b", "ovs_interfaceid": "392f2779-6bcf-4d28-9f9b-bd4279812dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 759.776566] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:3b:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '392f2779-6bcf-4d28-9f9b-bd4279812dc7', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.787534] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating folder: Project (4860bec4a28e4289b7a508f007fff452). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.788631] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f0c75a6-4773-493a-92e9-a5d28b78cbe0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.814026] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Created folder: Project (4860bec4a28e4289b7a508f007fff452) in parent group-v941773. [ 759.814566] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating folder: Instances. Parent ref: group-v941918. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.814990] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24b9309a-9508-46a2-b918-a48057d039d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.834907] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Created folder: Instances in parent group-v941918. [ 759.835402] env[62914]: DEBUG oslo.service.loopingcall [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.835700] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 759.836200] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2f5d042-9ea3-47c7-ad9f-94ee92393209 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.867979] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.867979] env[62914]: value = "task-4831775" [ 759.867979] env[62914]: _type = "Task" [ 759.867979] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.880730] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831775, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.971502] env[62914]: DEBUG nova.compute.utils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.973304] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 759.973533] env[62914]: DEBUG nova.network.neutron [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 760.066472] env[62914]: DEBUG oslo_vmware.api [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831769, 'name': PowerOnVM_Task, 'duration_secs': 0.82737} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.072243] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 760.072243] env[62914]: INFO nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Took 18.27 seconds to spawn the instance on the hypervisor. [ 760.072243] env[62914]: DEBUG nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 760.072243] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4261e704-cfde-483f-9174-c6b6567e11f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.080941] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831770, 'name': ReconfigVM_Task, 'duration_secs': 0.820471} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.082698] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 29a177e4-b5d7-4249-8fc5-2316f6891536/29a177e4-b5d7-4249-8fc5-2316f6891536.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.086112] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-268d46b6-2c2f-45f7-8fa0-bc83a1321bc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.094266] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 760.094266] env[62914]: value = "task-4831776" [ 760.094266] env[62914]: _type = "Task" [ 760.094266] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.104853] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831776, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.107168] env[62914]: DEBUG nova.policy [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 760.118663] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831772, 'name': Rename_Task, 'duration_secs': 0.217218} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.122656] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 760.123513] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b29cc77-2fb9-4092-833c-def40b1487e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.133668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b92f8850-271a-4e10-9ba4-3dc55acb5936 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.940s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.134024] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831771, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79332} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.137329] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 7d8287f9-10be-4834-8b7a-1b764145d1c3/7d8287f9-10be-4834-8b7a-1b764145d1c3.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 760.137581] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 760.138639] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 760.138639] env[62914]: value = "task-4831777" [ 760.138639] env[62914]: _type = "Task" [ 760.138639] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.138639] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdd2d9fd-f5b6-4334-a129-2ef8aa9f39f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.151912] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831777, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.154923] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 760.154923] env[62914]: value = "task-4831778" [ 760.154923] env[62914]: _type = "Task" [ 760.154923] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.166584] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831778, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.230928] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.231210] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 760.273963] env[62914]: DEBUG nova.compute.manager [req-15fdecb8-7bba-4994-b444-efefa108293e req-949d3fea-931a-4e64-8cca-e70c4ffb1ac3 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Received event network-vif-plugged-392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 760.274306] env[62914]: DEBUG oslo_concurrency.lockutils [req-15fdecb8-7bba-4994-b444-efefa108293e req-949d3fea-931a-4e64-8cca-e70c4ffb1ac3 service nova] Acquiring lock "968cbfbe-1570-48d6-890d-c7a680855574-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.274869] env[62914]: DEBUG oslo_concurrency.lockutils [req-15fdecb8-7bba-4994-b444-efefa108293e req-949d3fea-931a-4e64-8cca-e70c4ffb1ac3 service nova] Lock "968cbfbe-1570-48d6-890d-c7a680855574-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.274961] env[62914]: DEBUG oslo_concurrency.lockutils [req-15fdecb8-7bba-4994-b444-efefa108293e req-949d3fea-931a-4e64-8cca-e70c4ffb1ac3 service nova] Lock "968cbfbe-1570-48d6-890d-c7a680855574-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.275394] env[62914]: DEBUG nova.compute.manager [req-15fdecb8-7bba-4994-b444-efefa108293e req-949d3fea-931a-4e64-8cca-e70c4ffb1ac3 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] No waiting events found dispatching network-vif-plugged-392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 760.275657] env[62914]: WARNING nova.compute.manager [req-15fdecb8-7bba-4994-b444-efefa108293e 
req-949d3fea-931a-4e64-8cca-e70c4ffb1ac3 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Received unexpected event network-vif-plugged-392f2779-6bcf-4d28-9f9b-bd4279812dc7 for instance with vm_state building and task_state spawning. [ 760.379240] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831775, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.477524] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 760.602921] env[62914]: INFO nova.compute.manager [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Took 44.27 seconds to build instance. [ 760.616414] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831776, 'name': Rename_Task, 'duration_secs': 0.243272} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.616414] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 760.619916] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98cc6b09-f8f5-4372-882f-de4ef128d115 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.628520] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 760.628520] env[62914]: value = "task-4831779" [ 760.628520] env[62914]: _type = "Task" [ 760.628520] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.641506] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 760.645088] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831779, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.663550] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831777, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.671410] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109146} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.675423] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.676794] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32332af2-3696-40f1-8cdc-9bf4a698324b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.703248] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 7d8287f9-10be-4834-8b7a-1b764145d1c3/7d8287f9-10be-4834-8b7a-1b764145d1c3.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.707798] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-371a2b5c-b3b9-489f-9307-b398169652cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.734411] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 760.734411] env[62914]: value = "task-4831780" [ 760.734411] env[62914]: _type = "Task" [ 760.734411] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.751056] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831780, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.883515] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831775, 'name': CreateVM_Task, 'duration_secs': 0.540728} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.883800] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 760.884720] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.885011] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.885444] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 760.885798] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3ad7937-6fc8-4f84-919a-80d1ceb18ec1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.894377] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 760.894377] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b0525d-d851-407f-c4d9-a7071d31b2e6" [ 760.894377] env[62914]: _type = "Task" [ 760.894377] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.904969] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b0525d-d851-407f-c4d9-a7071d31b2e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.108872] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c4224a8a-c76a-45c2-9466-cf9081bca12d tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.368s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.143033] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831779, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.162760] env[62914]: DEBUG oslo_vmware.api [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831777, 'name': PowerOnVM_Task, 'duration_secs': 0.758464} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.162936] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 761.163185] env[62914]: DEBUG nova.compute.manager [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 761.164738] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a127f48d-d82b-4cb3-b984-de2686ffc976 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.177805] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.178609] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42e7f6d-c745-453f-8c05-5779804ec7a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.188120] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578df059-f342-47cf-9609-566bbffc1b07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.222901] env[62914]: DEBUG nova.network.neutron [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Successfully created port: 06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.227194] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c90427-2a40-4a5f-a775-16b54b4a6898 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.243583] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f997d5f7-53cf-4224-8ce2-65ebb29660c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.257506] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 
tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831780, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.266226] env[62914]: DEBUG nova.compute.provider_tree [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.331815] env[62914]: DEBUG nova.compute.manager [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Received event network-changed-392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 761.332018] env[62914]: DEBUG nova.compute.manager [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Refreshing instance network info cache due to event network-changed-392f2779-6bcf-4d28-9f9b-bd4279812dc7. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 761.333435] env[62914]: DEBUG oslo_concurrency.lockutils [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] Acquiring lock "refresh_cache-968cbfbe-1570-48d6-890d-c7a680855574" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.333435] env[62914]: DEBUG oslo_concurrency.lockutils [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] Acquired lock "refresh_cache-968cbfbe-1570-48d6-890d-c7a680855574" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.333435] env[62914]: DEBUG nova.network.neutron [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Refreshing network info cache for port 392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 761.351661] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.351783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.351839] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 761.408725] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b0525d-d851-407f-c4d9-a7071d31b2e6, 'name': 
SearchDatastore_Task, 'duration_secs': 0.011984} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.411953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.411953] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.411953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.411953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.411953] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.411953] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7053738d-3ce9-41e0-9d39-cb8bd907a3c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.421457] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.421571] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 761.422419] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfb025b2-13da-4ca3-8deb-36163354225f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.429451] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 761.429451] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dcba7d-b76d-c25d-47ad-510cb3e68df6" [ 761.429451] env[62914]: _type = "Task" [ 761.429451] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.439055] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dcba7d-b76d-c25d-47ad-510cb3e68df6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.490815] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 761.512706] env[62914]: DEBUG nova.network.neutron [-] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.525787] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.526111] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.526287] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 
tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.526479] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.526678] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.526886] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.527288] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.527564] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 761.527819] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.528059] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.528261] env[62914]: DEBUG nova.virt.hardware [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.529402] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bb1ab4-1e5c-43ae-8bba-687dc9c209af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.539887] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d462e1e9-2ba3-4d79-ab6d-4fc31741e5af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.583050] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "5a704020-921e-4ede-9fd9-b745c027a158" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.583359] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "5a704020-921e-4ede-9fd9-b745c027a158" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.583609] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "5a704020-921e-4ede-9fd9-b745c027a158-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.583820] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "5a704020-921e-4ede-9fd9-b745c027a158-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.584026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "5a704020-921e-4ede-9fd9-b745c027a158-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.586379] env[62914]: INFO nova.compute.manager [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Terminating instance [ 761.588383] env[62914]: DEBUG nova.compute.manager [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 761.588601] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 761.589493] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed1ff7a-2653-4240-816a-5b2f2825734f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.597666] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 761.597940] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6070435f-537c-4645-848f-b02f2bcb3283 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.605901] env[62914]: DEBUG oslo_vmware.api [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 761.605901] env[62914]: value = "task-4831781" [ 761.605901] env[62914]: _type = "Task" [ 761.605901] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.612026] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 761.619468] env[62914]: DEBUG oslo_vmware.api [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.642903] env[62914]: DEBUG oslo_vmware.api [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831779, 'name': PowerOnVM_Task, 'duration_secs': 0.77334} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.643233] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 761.643471] env[62914]: INFO nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Took 14.31 seconds to spawn the instance on the hypervisor. 
[ 761.643670] env[62914]: DEBUG nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 761.644507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b882b10-367e-4820-8c56-876249750fb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.693459] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.747979] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831780, 'name': ReconfigVM_Task, 'duration_secs': 0.567994} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.748450] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 7d8287f9-10be-4834-8b7a-1b764145d1c3/7d8287f9-10be-4834-8b7a-1b764145d1c3.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.749136] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b58c48af-fea7-4c23-9a17-5456e6a6eeaa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.757751] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 761.757751] env[62914]: value = "task-4831782" [ 761.757751] env[62914]: _type = "Task" [ 761.757751] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.770819] env[62914]: DEBUG nova.scheduler.client.report [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.774486] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831782, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.941467] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dcba7d-b76d-c25d-47ad-510cb3e68df6, 'name': SearchDatastore_Task, 'duration_secs': 0.012732} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.942818] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d9bf923-b75b-4c5f-a66f-1787bad65e5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.951428] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 761.951428] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d44f50-a18a-5dd1-a2fd-02d207ca727b" [ 761.951428] env[62914]: _type = "Task" [ 761.951428] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.962803] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d44f50-a18a-5dd1-a2fd-02d207ca727b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.019676] env[62914]: INFO nova.compute.manager [-] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Took 2.62 seconds to deallocate network for instance. [ 762.116444] env[62914]: DEBUG oslo_vmware.api [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831781, 'name': PowerOffVM_Task, 'duration_secs': 0.2932} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.116800] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 762.117041] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 762.117354] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea2e4db4-88fd-438b-949a-4619c1250556 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.141502] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.166715] env[62914]: INFO nova.compute.manager [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Took 43.37 seconds to build instance. [ 762.199596] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 762.200083] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 762.200322] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore1] 5a704020-921e-4ede-9fd9-b745c027a158 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.200662] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e6c152a-5ac9-42dc-9e89-ea0311f52b58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.212986] env[62914]: DEBUG oslo_vmware.api [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 762.212986] env[62914]: value = "task-4831784" [ 762.212986] env[62914]: _type = "Task" [ 762.212986] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.224405] env[62914]: DEBUG oslo_vmware.api [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.271886] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831782, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.276242] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.277120] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 762.281691] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.993s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.283902] env[62914]: INFO nova.compute.claims [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.305323] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "8b83f82b-42f7-4f33-abc4-ff278d343309" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.305698] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.305996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock 
"8b83f82b-42f7-4f33-abc4-ff278d343309-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.307599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.307599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.313022] env[62914]: INFO nova.compute.manager [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Terminating instance [ 762.313729] env[62914]: DEBUG nova.compute.manager [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 762.314075] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 762.315537] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12c15d0-143d-4dc5-b320-7c7722049196 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.319747] env[62914]: DEBUG nova.network.neutron [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Updated VIF entry in instance network info cache for port 392f2779-6bcf-4d28-9f9b-bd4279812dc7. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 762.320140] env[62914]: DEBUG nova.network.neutron [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Updating instance_info_cache with network_info: [{"id": "392f2779-6bcf-4d28-9f9b-bd4279812dc7", "address": "fa:16:3e:9c:3b:b9", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap392f2779-6b", "ovs_interfaceid": "392f2779-6bcf-4d28-9f9b-bd4279812dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.328261] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 762.329367] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea3e3f4b-468c-481e-995f-8dcc58862d32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.339228] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 762.339228] env[62914]: value = "task-4831785" [ 762.339228] env[62914]: _type = "Task" [ 762.339228] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.350309] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831785, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.462024] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d44f50-a18a-5dd1-a2fd-02d207ca727b, 'name': SearchDatastore_Task, 'duration_secs': 0.013168} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.462333] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.462613] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 968cbfbe-1570-48d6-890d-c7a680855574/968cbfbe-1570-48d6-890d-c7a680855574.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 762.462951] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db3ec70e-8bf5-4abd-8e2d-275707281f33 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.471164] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 762.471164] env[62914]: value = "task-4831786" [ 762.471164] env[62914]: _type = "Task" [ 762.471164] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.481804] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.528492] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.669826] env[62914]: DEBUG oslo_concurrency.lockutils [None req-651dc365-09e0-4e53-8437-5b4e70750a20 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.071s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.733290] env[62914]: DEBUG oslo_vmware.api [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.418047} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.733605] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 762.733810] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 762.734012] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 762.734267] env[62914]: INFO nova.compute.manager [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Took 1.15 seconds to destroy the instance on the hypervisor. [ 762.734514] env[62914]: DEBUG oslo.service.loopingcall [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.735361] env[62914]: DEBUG nova.compute.manager [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 762.735463] env[62914]: DEBUG nova.network.neutron [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 762.774206] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831782, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.791463] env[62914]: DEBUG nova.compute.utils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 762.795089] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 762.795343] env[62914]: DEBUG nova.network.neutron [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 762.826028] env[62914]: DEBUG oslo_concurrency.lockutils [req-9936ca7b-df39-4dde-8fbb-2b67cc674d1a req-e39044d1-0477-4a6e-826e-3840c14515b0 service nova] Releasing lock "refresh_cache-968cbfbe-1570-48d6-890d-c7a680855574" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.837007] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [{"id": "ae6db457-8035-4a28-bf52-7113144cfe11", "address": "fa:16:3e:bd:16:b0", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae6db457-80", "ovs_interfaceid": "ae6db457-8035-4a28-bf52-7113144cfe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.857942] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831785, 'name': PowerOffVM_Task, 'duration_secs': 0.329969} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.857942] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 762.857942] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 762.857942] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f097b934-34c3-4d02-9dfe-326b0bb143a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.878270] env[62914]: DEBUG nova.policy [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '60ed60579cd74494959b0a7f306f2832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '319610053c8a4ca19dcb0c0b3e6b6596', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 762.946915] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 762.947224] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 762.947441] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Deleting the datastore file [datastore2] 8b83f82b-42f7-4f33-abc4-ff278d343309 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.947973] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e67608a7-d7ac-4772-8965-332d23490068 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.957263] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 762.957263] env[62914]: value = "task-4831788" [ 762.957263] env[62914]: _type = "Task" [ 762.957263] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.975081] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.987082] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.140292] env[62914]: DEBUG nova.compute.manager [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Received event network-changed-29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 763.140292] env[62914]: DEBUG nova.compute.manager [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Refreshing instance network info cache due to event network-changed-29704154-556c-4ee1-a5d2-fafcd0ac6017. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 763.140292] env[62914]: DEBUG oslo_concurrency.lockutils [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] Acquiring lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.140292] env[62914]: DEBUG oslo_concurrency.lockutils [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] Acquired lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.140292] env[62914]: DEBUG nova.network.neutron [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Refreshing network info cache for port 29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 763.176025] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 763.277510] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831782, 'name': Rename_Task, 'duration_secs': 1.240208} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.278077] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 763.278698] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50e5c2d7-6b57-4308-aa4b-0707920eaa0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.293562] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 763.293562] env[62914]: value = "task-4831789" [ 763.293562] env[62914]: _type = "Task" [ 763.293562] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.299027] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 763.317021] env[62914]: DEBUG nova.network.neutron [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Successfully created port: 458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.321905] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831789, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.342901] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-aede8da7-8bf2-4963-b08b-6e06007614a5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.343132] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 763.343671] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.343869] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.344012] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.344180] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.344566] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.344566] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.344724] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 763.344876] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.468608] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831788, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.491322] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831786, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.605518] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.605877] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.671393] env[62914]: DEBUG nova.network.neutron [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Successfully updated port: 06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 763.686021] env[62914]: DEBUG nova.network.neutron [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.709239] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.830717] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831789, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.853887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.972453] env[62914]: DEBUG oslo_vmware.api [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.652679} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.972834] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.973016] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 763.973212] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 763.973432] env[62914]: INFO nova.compute.manager [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Took 1.66 seconds to destroy the instance on the hypervisor. [ 763.973639] env[62914]: DEBUG oslo.service.loopingcall [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.980177] env[62914]: DEBUG nova.compute.manager [-] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 763.980290] env[62914]: DEBUG nova.network.neutron [-] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 763.983854] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04993a42-0f6a-4e4e-9d21-0ffee286baab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.995235] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831786, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.137451} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.996258] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c929bfc4-b7a8-4f68-abb9-e818fc7e0ff0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.999859] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 968cbfbe-1570-48d6-890d-c7a680855574/968cbfbe-1570-48d6-890d-c7a680855574.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 764.000828] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.001797] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13c7393c-f0af-40ce-bf0c-0b9479535b75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.046278] env[62914]: DEBUG nova.network.neutron [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Updated VIF entry in instance network info cache for port 29704154-556c-4ee1-a5d2-fafcd0ac6017. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 764.047692] env[62914]: DEBUG nova.network.neutron [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Updating instance_info_cache with network_info: [{"id": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "address": "fa:16:3e:86:1d:48", "network": {"id": "2ade9d92-db6f-492b-b2fc-c0b736f0a7e5", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1526613118-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d3a3c33ee0c43e981a93d51f5779c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29704154-55", "ovs_interfaceid": "29704154-556c-4ee1-a5d2-fafcd0ac6017", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.048583] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0c5d13-97ab-4c2b-9b77-5cd92b2881d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.051717] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 764.051717] env[62914]: value = "task-4831790" [ 764.051717] env[62914]: _type = "Task" [ 764.051717] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.061636] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4f20f1-cf4f-467d-b8b0-859a1bc0038b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.069898] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831790, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.081301] env[62914]: DEBUG nova.compute.provider_tree [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.175776] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.175972] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.178712] env[62914]: DEBUG nova.network.neutron [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.189910] env[62914]: INFO nova.compute.manager [-] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Took 1.45 seconds to deallocate network for instance. [ 764.300697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.301116] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.301363] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.301561] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.301743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.308436] env[62914]: INFO nova.compute.manager [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Terminating instance [ 764.311614] env[62914]: DEBUG nova.compute.manager [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 764.311920] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 764.312774] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9f03f4-18c7-44cb-b5bc-db7085762fc9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.320108] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831789, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.323994] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 764.328763] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 764.329077] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56f9f731-722d-428f-8b27-0e9cc30ea6b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.337752] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 764.337752] env[62914]: value = "task-4831791" [ 764.337752] env[62914]: _type = "Task" [ 764.337752] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.348451] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.358864] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.359223] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.359520] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.359714] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.359772] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.359979] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.360233] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 764.360402] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.360578] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.360752] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.360933] env[62914]: DEBUG nova.virt.hardware [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.361835] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8f6c05-c3b5-47bf-ae61-09ab222a8bd4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.370357] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3217a32-31f9-4688-afd1-7b17d0b297b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.552758] env[62914]: DEBUG oslo_concurrency.lockutils [req-286bd88b-26a8-44f1-8722-243f5f3f6947 req-2947ed60-3b53-49f2-9f49-15f343ccbab1 service nova] Releasing lock "refresh_cache-76dfbf82-0ed0-4621-890c-060b187b47e0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.563110] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182874} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.563458] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.565302] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6b151f-7147-4580-99b3-85224f0e1a06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.591338] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 968cbfbe-1570-48d6-890d-c7a680855574/968cbfbe-1570-48d6-890d-c7a680855574.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.594851] env[62914]: DEBUG nova.scheduler.client.report [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 764.598718] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bf1bbcb-b7fa-4ffe-a228-9edb66a39d83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.622087] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 764.622087] env[62914]: value = "task-4831792" [ 764.622087] env[62914]: _type = "Task" [ 764.622087] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.633932] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831792, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.705276] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.724462] env[62914]: DEBUG nova.network.neutron [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.810432] env[62914]: DEBUG oslo_vmware.api [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831789, 'name': PowerOnVM_Task, 'duration_secs': 1.067233} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.810724] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 764.810953] env[62914]: INFO nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Took 12.51 seconds to spawn the instance on the hypervisor. [ 764.811167] env[62914]: DEBUG nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 764.811952] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901b7d04-326b-4b74-9061-4f7b5e671dd0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.850327] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831791, 'name': PowerOffVM_Task, 'duration_secs': 0.229283} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.850636] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 764.850842] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 764.851131] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3947c086-b509-4b57-bc07-066aba33ff3e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.915097] env[62914]: DEBUG nova.compute.manager [req-301a8f1d-bccb-4ace-8fb7-1875d18cb28e req-86d4ad46-7bbc-4546-b5bb-78989d076a9d service nova] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Received event network-vif-deleted-df40a274-9dea-4b4c-be39-6e7556e77032 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 764.955953] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 764.956398] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 764.956734] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Deleting the datastore file [datastore2] bc6da94e-4de8-4e56-a071-d04c5e5dad18 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.957145] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-476d37e1-00c6-408e-bd29-b2e051790e10 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.966088] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for the task: (returnval){ [ 764.966088] env[62914]: value = "task-4831794" [ 764.966088] env[62914]: _type = "Task" [ 764.966088] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.980809] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831794, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.022175] env[62914]: DEBUG nova.network.neutron [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.094489] env[62914]: DEBUG nova.network.neutron [-] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.117315] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.117944] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 765.121071] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.541s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.121330] env[62914]: DEBUG nova.objects.instance [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lazy-loading 'resources' on Instance uuid bfdd7711-d081-42cf-9e4a-2df556d1b72e {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 765.132301] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831792, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.133155] env[62914]: DEBUG nova.network.neutron [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Successfully updated port: 458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.148339] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "e6544702-bde7-4056-8a50-adede5c6a9d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.148595] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "e6544702-bde7-4056-8a50-adede5c6a9d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.148804] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "e6544702-bde7-4056-8a50-adede5c6a9d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.148993] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "e6544702-bde7-4056-8a50-adede5c6a9d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.149261] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e 
tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "e6544702-bde7-4056-8a50-adede5c6a9d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.151268] env[62914]: INFO nova.compute.manager [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Terminating instance [ 765.152909] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "refresh_cache-e6544702-bde7-4056-8a50-adede5c6a9d6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.153087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquired lock "refresh_cache-e6544702-bde7-4056-8a50-adede5c6a9d6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.153267] env[62914]: DEBUG nova.network.neutron [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 765.335189] env[62914]: INFO nova.compute.manager [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Took 37.24 seconds to build instance. [ 765.479025] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831794, 'name': DeleteDatastoreFile_Task} progress is 0%. 
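[editor's note] The Acquiring/acquired/released lines around "compute_resources" and the per-instance "...-events" lock are emitted by oslo.concurrency's lockutils wrappers. A short sketch of the two usual usage forms follows, assuming oslo.concurrency is installed; the lock names, function bodies, and callers are illustrative, not Nova's code.

```python
from oslo_concurrency import lockutils


# Decorator form: serializes every call on one named lock, roughly the pattern
# behind the 'Lock "compute_resources" acquired by ... :: waited N.NNNs' lines.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    print(f"claiming resources for {instance_uuid}")


# Context-manager form: handy for finer-grained sections such as the
# per-instance "<uuid>-events" lock seen above.
def clear_events(instance_uuid):
    with lockutils.lock(f'{instance_uuid}-events'):
        print(f"clearing pending events for {instance_uuid}")


if __name__ == "__main__":
    claim_resources('e6544702-bde7-4056-8a50-adede5c6a9d6')
    clear_events('e6544702-bde7-4056-8a50-adede5c6a9d6')
```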
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.525635] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.526118] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Instance network_info: |[{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 765.526681] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:c8:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06e36426-302a-4bcd-bb7a-f9d6dd3a72c2', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.536652] env[62914]: DEBUG oslo.service.loopingcall [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
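[editor's note] The two entries above carry the same port at two levels of detail: the Neutron-built network_info cached for instance 2d48056c-d38f-4be1-b28b-71da14607870, and the condensed "Instance VIF info" the VMware driver derives from it (opaque NSX switch id, MAC, iface id, vmxnet3 model). The sketch below reproduces that projection from a trimmed copy of the logged port; `to_vif_info` is a hypothetical helper, not the driver's code.

```python
# Trimmed copy of one port from the instance_info_cache entry logged above.
network_info = [{
    "id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2",
    "address": "fa:16:3e:be:c8:0d",
    "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7"},
    "vnic_type": "normal",
}]


def to_vif_info(network_info, vif_model="vmxnet3"):
    """Project Neutron network_info into flat per-VIF dicts shaped like the
    'Instance VIF info' entries in the log (illustrative only)."""
    vifs = []
    for vif in network_info:
        vifs.append({
            "network_name": vif["network"]["bridge"],
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": vif["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        })
    return vifs


if __name__ == "__main__":
    for entry in to_vif_info(network_info):
        print(entry)
```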
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.536943] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 765.537216] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9eb8975f-6528-4faa-905e-aa27c5f47be9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.560270] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.560270] env[62914]: value = "task-4831795" [ 765.560270] env[62914]: _type = "Task" [ 765.560270] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.571520] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831795, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.597905] env[62914]: INFO nova.compute.manager [-] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Took 1.62 seconds to deallocate network for instance. [ 765.622785] env[62914]: DEBUG nova.compute.utils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.625055] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 765.625055] env[62914]: DEBUG nova.network.neutron [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 765.636113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.636113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.636415] env[62914]: DEBUG nova.network.neutron [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 765.642634] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831792, 'name': ReconfigVM_Task, 'duration_secs': 0.90121} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.643477] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 968cbfbe-1570-48d6-890d-c7a680855574/968cbfbe-1570-48d6-890d-c7a680855574.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.644232] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0fd35fc-9575-44a8-b32a-05683a6c5959 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.653979] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 765.653979] env[62914]: value = "task-4831796" [ 765.653979] env[62914]: _type = "Task" [ 765.653979] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.670365] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831796, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.684393] env[62914]: DEBUG nova.policy [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 765.691590] env[62914]: DEBUG nova.network.neutron [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.815414] env[62914]: DEBUG nova.network.neutron [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.837385] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9df777fc-cdd3-4011-8d64-b901bbb4b81c tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.472s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.982694] env[62914]: DEBUG oslo_vmware.api [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Task: {'id': task-4831794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.536763} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.983022] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.985067] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 765.985292] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 765.985796] env[62914]: INFO nova.compute.manager [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Took 1.67 seconds to destroy the instance on the hypervisor. [ 765.985796] env[62914]: DEBUG oslo.service.loopingcall [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.986823] env[62914]: DEBUG nova.compute.manager [-] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 765.986823] env[62914]: DEBUG nova.network.neutron [-] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.002339] env[62914]: DEBUG nova.network.neutron [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Successfully created port: 2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.071249] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831795, 'name': CreateVM_Task, 'duration_secs': 0.436606} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.071429] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 766.074681] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.074873] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.075238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 766.076644] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36a5cba4-9a33-4f16-be78-f0dcbb24c7bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.081805] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 766.081805] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52359f86-69ad-554a-7978-b5de6830e7d7" [ 766.081805] env[62914]: _type = "Task" [ 766.081805] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.091264] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52359f86-69ad-554a-7978-b5de6830e7d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.105387] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.129966] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 766.167823] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831796, 'name': Rename_Task, 'duration_secs': 0.203851} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.168153] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 766.168429] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68db7d18-6f0f-48c3-9af7-0d90d19b6ced {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.178211] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 766.178211] env[62914]: value = "task-4831797" [ 766.178211] env[62914]: _type = "Task" [ 766.178211] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.187895] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831797, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.249302] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf06863-fcd1-4657-be5b-5e152ba155c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.258420] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8173fefd-64ac-4f60-b1e2-16c931427962 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.292079] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907b8b45-620a-413b-b259-bd0dc45f8b34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.301088] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c9280a-d204-4a96-beb5-9b814780bcdc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.316456] env[62914]: DEBUG nova.compute.provider_tree [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.319199] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Releasing lock "refresh_cache-e6544702-bde7-4056-8a50-adede5c6a9d6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.319606] env[62914]: DEBUG nova.compute.manager [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 766.319838] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 766.320721] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e481eb6-f977-462f-9487-44307ee5afa2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.329759] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 766.329976] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ed89e9f-987b-4b93-bfaf-4a38bb62f9dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.339163] env[62914]: DEBUG oslo_vmware.api [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 766.339163] env[62914]: value = "task-4831798" [ 766.339163] env[62914]: _type = "Task" [ 766.339163] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.345101] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 766.353704] env[62914]: DEBUG oslo_vmware.api [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831798, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.399284] env[62914]: DEBUG nova.network.neutron [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.596433] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52359f86-69ad-554a-7978-b5de6830e7d7, 'name': SearchDatastore_Task, 'duration_secs': 0.033268} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.600015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.600109] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.600294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.600437] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.600618] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.602247] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc2f2fe1-aed5-4b9b-aeb9-c426d5f8aeaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.613064] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.613064] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Folder [datastore2] devstack-image-cache_base created. 
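[editor's note] The SearchDatastore_Task / MakeDirectory exchange above, performed under the "[datastore2] devstack-image-cache_base/75c43660-..." lock, is a per-image check-then-populate of the image cache. A rough sketch of that pattern follows, with the local filesystem standing in for the datastore and a caller-supplied `fetch` callable standing in for the image copy; none of these names come from Nova.

```python
from pathlib import Path

from oslo_concurrency import lockutils


def ensure_cached_image(cache_root: Path, image_id: str, fetch):
    """Check-then-populate an image cache under a per-image lock, mirroring
    the lock -> SearchDatastore_Task -> MakeDirectory sequence in the log.
    `fetch` is a hypothetical callable returning the image bytes."""
    cached = cache_root / image_id / f"{image_id}.vmdk"
    with lockutils.lock(str(cached)):           # per-image cache lock
        if cached.exists():                     # SearchDatastore_Task equivalent
            return cached
        cached.parent.mkdir(parents=True, exist_ok=True)   # MakeDirectory step
        cached.write_bytes(fetch(image_id))                # copy/download step
    return cached


if __name__ == "__main__":
    import tempfile
    root = Path(tempfile.mkdtemp())
    path = ensure_cached_image(root, "75c43660-b52b-450e-ba36-0f721e14bc6c",
                               fetch=lambda image_id: b"fake image bytes")
    print(path, path.stat().st_size)
```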
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 766.617021] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-221b672d-20bf-45e6-92aa-b153a391e326 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.621123] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 766.621123] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233d5a3-ccc7-e89f-9141-6c95db7c4de8" [ 766.621123] env[62914]: _type = "Task" [ 766.621123] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.630187] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233d5a3-ccc7-e89f-9141-6c95db7c4de8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.656736] env[62914]: DEBUG nova.network.neutron [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Updating instance_info_cache with network_info: [{"id": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "address": "fa:16:3e:f0:25:3b", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458d38ce-bc", "ovs_interfaceid": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.691238] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831797, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.820556] env[62914]: DEBUG nova.scheduler.client.report [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 766.858496] env[62914]: DEBUG oslo_vmware.api [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831798, 'name': PowerOffVM_Task, 'duration_secs': 0.128391} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.859928] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 766.860117] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 766.861745] env[62914]: DEBUG nova.compute.manager [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Received event network-vif-deleted-48f699bf-5203-47d2-88d5-9747169234ea {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 766.861965] env[62914]: DEBUG nova.compute.manager [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Received event network-vif-plugged-458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 766.862193] env[62914]: DEBUG oslo_concurrency.lockutils [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] Acquiring lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.862404] env[62914]: DEBUG oslo_concurrency.lockutils [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.862589] env[62914]: DEBUG oslo_concurrency.lockutils 
[req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.862777] env[62914]: DEBUG nova.compute.manager [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] No waiting events found dispatching network-vif-plugged-458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 766.863129] env[62914]: WARNING nova.compute.manager [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Received unexpected event network-vif-plugged-458d38ce-bc0b-471c-a588-9d31e99cbe74 for instance with vm_state building and task_state spawning. [ 766.863212] env[62914]: DEBUG nova.compute.manager [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Received event network-changed-458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 766.864061] env[62914]: DEBUG nova.compute.manager [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Refreshing instance network info cache due to event network-changed-458d38ce-bc0b-471c-a588-9d31e99cbe74. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 766.864061] env[62914]: DEBUG oslo_concurrency.lockutils [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] Acquiring lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.864061] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-897bd689-83cc-4f87-a03b-2760c6cdf3cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.876148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.894733] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 766.895009] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 766.895292] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Deleting the datastore file [datastore2] e6544702-bde7-4056-8a50-adede5c6a9d6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 766.895582] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eef5ae33-cc03-4658-89c4-80d82e34fd8d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.903991] env[62914]: DEBUG oslo_vmware.api [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for the task: (returnval){ [ 766.903991] env[62914]: value = "task-4831800" [ 766.903991] env[62914]: _type = "Task" [ 766.903991] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.913462] env[62914]: DEBUG oslo_vmware.api [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831800, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.134354] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233d5a3-ccc7-e89f-9141-6c95db7c4de8, 'name': SearchDatastore_Task, 'duration_secs': 0.012515} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.134888] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4d16e89-ad7a-4095-836e-6b01ff4173d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.140715] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 767.144672] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 767.144672] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528d0db5-55a9-e6b3-376f-380cea5d1a6c" [ 767.144672] env[62914]: _type = "Task" [ 767.144672] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.155494] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528d0db5-55a9-e6b3-376f-380cea5d1a6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.162464] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.162464] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Instance network_info: |[{"id": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "address": "fa:16:3e:f0:25:3b", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458d38ce-bc", "ovs_interfaceid": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 767.163082] env[62914]: DEBUG oslo_concurrency.lockutils [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] Acquired lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.167020] env[62914]: DEBUG nova.network.neutron [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Refreshing network info cache for port 458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 767.167020] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:25:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8abee039-d93e-48a7-8911-6416a3e1ff30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '458d38ce-bc0b-471c-a588-9d31e99cbe74', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.173796] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 
tempest-ListServerFiltersTestJSON-403224292-project-member] Creating folder: Project (319610053c8a4ca19dcb0c0b3e6b6596). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 767.177837] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 767.178333] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 767.178768] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.179112] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 767.180355] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.180355] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 767.180355] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 767.180355] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 767.180355] 
env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 767.180355] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 767.180818] env[62914]: DEBUG nova.virt.hardware [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.181633] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f2ef85c-d36c-4a74-91ec-10c66f6b9b97 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.184122] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb027b82-a9a5-4f06-a0b2-d8ed7c21e1f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.200470] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79eaa20c-be61-41d0-b938-8c56eac0a5fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.204617] env[62914]: DEBUG oslo_vmware.api [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831797, 'name': PowerOnVM_Task, 'duration_secs': 0.596163} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.206494] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 767.206750] env[62914]: INFO nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Took 11.97 seconds to spawn the instance on the hypervisor. [ 767.206958] env[62914]: DEBUG nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 767.207276] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Created folder: Project (319610053c8a4ca19dcb0c0b3e6b6596) in parent group-v941773. 
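[editor's note] The nova.virt.hardware lines above walk from flavor/image limits to "Got 1 possible topologies" for a single vCPU. A simplified enumeration consistent with that result is sketched below: it brute-forces (sockets, cores, threads) triples whose product equals the vCPU count within the given limits. This is an illustration, not Nova's exact algorithm.

```python
from itertools import product


def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) triples whose product equals the
    vCPU count, within the given limits; a simplified take on the search
    behind 'Build topologies for N vcpu(s)' / 'Got N possible topologies'."""
    topologies = []
    for sockets, cores, threads in product(range(1, max_sockets + 1),
                                           range(1, max_cores + 1),
                                           range(1, max_threads + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies


if __name__ == "__main__":
    # One vCPU collapses to a single topology, matching the log's result;
    # small limits keep this brute-force demo fast (the log's 65536 limits
    # would make exhaustive enumeration impractical here).
    print(possible_cpu_topologies(1, 4, 4, 2))   # -> [(1, 1, 1)]
    print(possible_cpu_topologies(4, 4, 4, 2))   # several candidate splits
```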
[ 767.207446] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Creating folder: Instances. Parent ref: group-v941922. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 767.209055] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcb43d1-fa7a-40b9-9454-f4dcbcae35f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.212297] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92288579-c0ac-429f-9f93-40c7f0629dd3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.232817] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Created folder: Instances in parent group-v941922. [ 767.233143] env[62914]: DEBUG oslo.service.loopingcall [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.233356] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 767.233595] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a4d68b4-8a19-42ae-9828-9d6b7589f818 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.253656] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.253656] env[62914]: value = "task-4831803" [ 767.253656] env[62914]: _type = "Task" [ 767.253656] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.263838] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831803, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.327068] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.329500] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.993s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.329876] env[62914]: DEBUG nova.objects.instance [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lazy-loading 'resources' on Instance uuid 1fa01184-1ed2-43de-bcbf-bd8658acc9f9 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 767.360045] env[62914]: INFO nova.scheduler.client.report [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleted allocations for instance bfdd7711-d081-42cf-9e4a-2df556d1b72e [ 767.418254] env[62914]: DEBUG oslo_vmware.api [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Task: {'id': task-4831800, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193426} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.419713] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 767.419713] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 767.419713] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 767.419713] env[62914]: INFO nova.compute.manager [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Took 1.10 seconds to destroy the instance on the hypervisor. 
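The "compute_resources" records above come from oslo.concurrency's lock helpers serializing the resource tracker: one request held the lock for 2.205s while another waited 23.993s to acquire it. Below is a minimal sketch of the two usual oslo_concurrency.lockutils patterns; the functions are placeholders, not Nova's ResourceTracker code. The decorator form emits the 'acquired by ... / "released" by ... held N.NNNs' lines (lockutils inner), while the context-manager form emits the 'Acquiring/Acquired/Releasing lock' lines seen elsewhere in this log.

    # Hedged sketch of the locking pattern behind the compute_resources records.
    from oslo_concurrency import lockutils

    # Decorator form: every call serializes on the named semaphore.
    @lockutils.synchronized('compute_resources')
    def update_usage_example(instance_uuid):
        # ... mutate in-memory usage accounting here (placeholder) ...
        return instance_uuid

    # Equivalent context-manager form.
    def claim_example(instance_uuid):
        with lockutils.lock('compute_resources'):
            # ... claim CPU/RAM/disk for the instance (placeholder) ...
            return instance_uuid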
[ 767.419713] env[62914]: DEBUG oslo.service.loopingcall [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.419713] env[62914]: DEBUG nova.compute.manager [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 767.420091] env[62914]: DEBUG nova.network.neutron [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.444672] env[62914]: DEBUG nova.network.neutron [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.629815] env[62914]: DEBUG nova.network.neutron [-] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.659312] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528d0db5-55a9-e6b3-376f-380cea5d1a6c, 'name': SearchDatastore_Task, 'duration_secs': 0.015219} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.661019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.661019] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 2d48056c-d38f-4be1-b28b-71da14607870/2d48056c-d38f-4be1-b28b-71da14607870.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 767.661019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bb43d9c-688a-4f6e-b0fc-cdb38d02613e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.671021] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 767.671021] env[62914]: value = "task-4831804" [ 767.671021] env[62914]: _type = "Task" [ 767.671021] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.685436] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831804, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.742103] env[62914]: INFO nova.compute.manager [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Took 37.64 seconds to build instance. [ 767.767218] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831803, 'name': CreateVM_Task, 'duration_secs': 0.475535} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.768191] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 767.768755] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.768982] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.771109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 767.771109] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11a05072-5b41-413c-8684-63920aaecad6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.777397] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 767.777397] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5222b7a8-56db-53f8-2a8d-142f7ba009c0" [ 767.777397] env[62914]: _type = "Task" [ 767.777397] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.789346] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5222b7a8-56db-53f8-2a8d-142f7ba009c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.871837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9ace44e5-87ac-445a-b169-f527629cc083 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "bfdd7711-d081-42cf-9e4a-2df556d1b72e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.789s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.947917] env[62914]: DEBUG nova.network.neutron [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.093569] env[62914]: DEBUG nova.network.neutron [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Updated VIF entry in instance network info cache for port 458d38ce-bc0b-471c-a588-9d31e99cbe74. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 768.093942] env[62914]: DEBUG nova.network.neutron [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Updating instance_info_cache with network_info: [{"id": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "address": "fa:16:3e:f0:25:3b", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458d38ce-bc", "ovs_interfaceid": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.132399] env[62914]: INFO nova.compute.manager [-] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Took 2.15 seconds to deallocate network for instance. 
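The CopyVirtualDisk_Task and CreateVM_Task records above follow the usual oslo.vmware pattern: a SOAP method is invoked through the session, vCenter returns a Task managed-object reference (e.g. "task-4831804"), and the session polls it until it completes. The sketch below shows that invoke-then-poll pattern using oslo.vmware's public VMwareAPISession methods; `session` stands for the session the driver creates at startup, the datastore paths are placeholders, and Nova's own vm_util wrappers differ in detail.

    # Hedged sketch of the invoke-then-poll pattern behind CopyVirtualDisk_Task.
    from oslo_vmware import api

    def copy_root_disk(session: api.VMwareAPISession, source_vmdk, dest_vmdk):
        """Copy a cached image VMDK to the instance directory and wait for it."""
        disk_mgr = session.vim.service_content.virtualDiskManager

        # Start the asynchronous copy; vSphere returns a Task reference.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=source_vmdk, destName=dest_vmdk)

        # Poll until the task reaches 'success' (the 'progress is N%' /
        # 'completed successfully' lines in the log); raises on task error.
        session.wait_for_task(task)

    # Example call (paths are placeholders, not values from this log):
    # copy_root_disk(session,
    #                '[datastore2] devstack-image-cache_base/<image>/<image>.vmdk',
    #                '[datastore2] <instance-uuid>/<instance-uuid>.vmdk')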
[ 768.146943] env[62914]: DEBUG nova.network.neutron [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Successfully updated port: 2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 768.184494] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831804, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.243988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-26be1ec7-4fe6-4269-bea5-498c40dfcda7 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "968cbfbe-1570-48d6-890d-c7a680855574" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.155s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.296318] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5222b7a8-56db-53f8-2a8d-142f7ba009c0, 'name': SearchDatastore_Task, 'duration_secs': 0.020242} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.296318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.296318] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.296318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.296318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.296318] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 
tempest-ListServerFiltersTestJSON-403224292-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.296318] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0575bd63-960a-4269-baec-7d7ec53ba686 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.317027] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.317027] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 768.317416] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09326776-7728-41f0-8618-867d9cd763e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.334498] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 768.334498] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e11581-0e5e-1c68-92b6-a3a4fc3100e1" [ 768.334498] env[62914]: _type = "Task" [ 768.334498] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.352024] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-vif-plugged-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 768.352024] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.352024] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.352024] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.352024] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] No waiting events found dispatching network-vif-plugged-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 768.352024] env[62914]: WARNING nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received unexpected event network-vif-plugged-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 for instance with vm_state building and task_state spawning. [ 768.352024] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-changed-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 768.352024] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing instance network info cache due to event network-changed-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 768.352024] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.352024] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.352024] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing network info cache for port 06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 768.364666] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e11581-0e5e-1c68-92b6-a3a4fc3100e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.448021] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babe36f5-5086-453c-bcbc-1af79b0bb269 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.452028] env[62914]: INFO nova.compute.manager [-] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Took 1.03 seconds to deallocate network for instance. 
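Several records above ("Waiting for function ... to return", "progress is N% ... completed successfully" with a duration) show the oslo.service looping-call pattern used to poll long-running work at a fixed interval. The sketch below illustrates that pattern with oslo_service.loopingcall; check_done and the fake poller are placeholders for whatever is actually being polled (for example, a vCenter task's state).

    # Hedged sketch of the fixed-interval polling behind the task-progress records.
    from oslo_service import loopingcall

    def poll_until_done(check_done, interval=0.5):
        """Call check_done() every `interval` seconds until it returns a result."""

        def _poll():
            result = check_done()
            if result is not None:
                # Stop the loop and hand the value back to wait().
                raise loopingcall.LoopingCallDone(result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()

    # Usage example: a pretend poller that "completes" on the third call.
    _calls = {'n': 0}
    def _fake_task_state():
        _calls['n'] += 1
        return 'success' if _calls['n'] >= 3 else None

    print(poll_until_done(_fake_task_state, interval=0.01))  # -> 'success'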
[ 768.462232] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edd4e32-5bca-4b3b-bf2c-8c595b028c40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.505111] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb1a49e-12f9-41cf-b20d-4ad864fa188e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.513484] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc60e0f6-4d07-4e7c-b986-eff937960fdc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.534023] env[62914]: DEBUG nova.compute.provider_tree [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.599300] env[62914]: DEBUG oslo_concurrency.lockutils [req-e489faaa-5cfc-4700-8868-9f060afe972c req-fbf9b157-30c3-419b-9a37-6b1d74566989 service nova] Releasing lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.642745] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.651590] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.651765] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.651945] env[62914]: DEBUG nova.network.neutron [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 768.681526] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.85273} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.681877] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 2d48056c-d38f-4be1-b28b-71da14607870/2d48056c-d38f-4be1-b28b-71da14607870.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 768.682056] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.682341] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ed06c4e-8c51-43d4-a6ba-6a60fa64a20e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.690108] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 768.690108] env[62914]: value = "task-4831805" [ 768.690108] env[62914]: _type = "Task" [ 768.690108] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.702488] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831805, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.746955] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 768.847163] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e11581-0e5e-1c68-92b6-a3a4fc3100e1, 'name': SearchDatastore_Task, 'duration_secs': 0.069701} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.848513] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93ab4403-1f10-4a44-ad60-a63ee782ce56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.857224] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 768.857224] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52729c9d-8032-1c9f-4fd7-577dbfaa6357" [ 768.857224] env[62914]: _type = "Task" [ 768.857224] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.868680] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52729c9d-8032-1c9f-4fd7-577dbfaa6357, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.964079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.034345] env[62914]: DEBUG nova.scheduler.client.report [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 769.078195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "4496a977-30b2-4323-a561-884633958cdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.078753] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "4496a977-30b2-4323-a561-884633958cdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.079011] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 
tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "4496a977-30b2-4323-a561-884633958cdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.079236] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "4496a977-30b2-4323-a561-884633958cdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.079418] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "4496a977-30b2-4323-a561-884633958cdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.081963] env[62914]: INFO nova.compute.manager [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Terminating instance [ 769.084540] env[62914]: DEBUG nova.compute.manager [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 769.084930] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 769.086275] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9cc7af-8558-492d-865e-c9454e7f6882 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.090237] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updated VIF entry in instance network info cache for port 06e36426-302a-4bcd-bb7a-f9d6dd3a72c2. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 769.090897] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.097658] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 769.097956] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4251c014-d92e-4956-9fb9-c9916da1557c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.106670] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 769.106670] env[62914]: value = "task-4831806" [ 769.106670] env[62914]: _type = "Task" [ 769.106670] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.118491] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831806, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.187308] env[62914]: DEBUG nova.network.neutron [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 769.203463] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079053} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.203463] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.203463] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb62eede-0371-46ce-8807-b94c519c9c7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.236735] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 2d48056c-d38f-4be1-b28b-71da14607870/2d48056c-d38f-4be1-b28b-71da14607870.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.236735] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac0c27be-a58f-4d67-a11a-95205f998d79 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.266088] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 769.266088] env[62914]: value = "task-4831807" [ 769.266088] env[62914]: _type = "Task" [ 769.266088] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.278605] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831807, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.284622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.369176] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52729c9d-8032-1c9f-4fd7-577dbfaa6357, 'name': SearchDatastore_Task, 'duration_secs': 0.015567} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.369457] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.370030] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1fb67ac1-c0b7-48b9-8562-d457d46709bc/1fb67ac1-c0b7-48b9-8562-d457d46709bc.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 769.370219] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d2c4090-2f3c-408b-aa47-a5c156c8082b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.379383] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 769.379383] env[62914]: value = "task-4831808" [ 769.379383] env[62914]: _type = "Task" [ 769.379383] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.390272] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.402391] env[62914]: DEBUG nova.network.neutron [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Updating instance_info_cache with network_info: [{"id": "2de06f63-3449-4e6e-af95-5835f882045b", "address": "fa:16:3e:32:91:c9", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de06f63-34", "ovs_interfaceid": "2de06f63-3449-4e6e-af95-5835f882045b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.541241] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.543718] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.092s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.545845] env[62914]: INFO nova.compute.claims [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.589286] env[62914]: INFO nova.scheduler.client.report [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Deleted allocations for instance 1fa01184-1ed2-43de-bcbf-bd8658acc9f9 [ 769.595069] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.595305] env[62914]: DEBUG nova.compute.manager 
[req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Received event network-vif-deleted-f2c2416f-fd5d-479b-b87b-5c00e77e23d0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 769.595741] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Received event network-changed-c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 769.595741] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Refreshing instance network info cache due to event network-changed-c68776d2-73ad-4ec2-b114-31f5878098d8. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 769.596187] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquiring lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.596187] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquired lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.596345] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Refreshing network info cache for port c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 769.622742] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831806, 'name': PowerOffVM_Task, 'duration_secs': 0.378561} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.623673] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 769.623891] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 769.624187] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a548eb06-25ca-4850-b1e1-df8cf713e540 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.705661] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 769.705661] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 769.705661] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Deleting the datastore file [datastore1] 4496a977-30b2-4323-a561-884633958cdf {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.705661] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-384d7663-356c-4fa7-ac30-7021e39e12a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.717114] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for the task: (returnval){ [ 769.717114] env[62914]: value = "task-4831810" [ 769.717114] env[62914]: _type = "Task" [ 769.717114] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.728868] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831810, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.779138] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831807, 'name': ReconfigVM_Task, 'duration_secs': 0.384751} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.779574] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 2d48056c-d38f-4be1-b28b-71da14607870/2d48056c-d38f-4be1-b28b-71da14607870.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.780355] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fe99a9f-dad7-4c06-a79d-cccbcbd4ba25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.793780] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 769.793780] env[62914]: value = "task-4831811" [ 769.793780] env[62914]: _type = "Task" [ 769.793780] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.809606] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831811, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.869383] env[62914]: DEBUG nova.compute.manager [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Received event network-vif-plugged-2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 769.869635] env[62914]: DEBUG oslo_concurrency.lockutils [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] Acquiring lock "e061304c-998b-4331-b60d-809916844a6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.869903] env[62914]: DEBUG oslo_concurrency.lockutils [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] Lock "e061304c-998b-4331-b60d-809916844a6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.870143] env[62914]: DEBUG oslo_concurrency.lockutils [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] Lock "e061304c-998b-4331-b60d-809916844a6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.870332] env[62914]: DEBUG nova.compute.manager [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] No waiting events found dispatching 
network-vif-plugged-2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 769.870522] env[62914]: WARNING nova.compute.manager [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Received unexpected event network-vif-plugged-2de06f63-3449-4e6e-af95-5835f882045b for instance with vm_state building and task_state spawning. [ 769.870735] env[62914]: DEBUG nova.compute.manager [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Received event network-changed-2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 769.870932] env[62914]: DEBUG nova.compute.manager [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Refreshing instance network info cache due to event network-changed-2de06f63-3449-4e6e-af95-5835f882045b. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 769.871133] env[62914]: DEBUG oslo_concurrency.lockutils [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] Acquiring lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.893791] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831808, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.907155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.907155] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Instance network_info: |[{"id": "2de06f63-3449-4e6e-af95-5835f882045b", "address": "fa:16:3e:32:91:c9", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de06f63-34", "ovs_interfaceid": "2de06f63-3449-4e6e-af95-5835f882045b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 769.907155] env[62914]: DEBUG oslo_concurrency.lockutils [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] Acquired lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.907155] env[62914]: DEBUG nova.network.neutron [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Refreshing network info cache for port 2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 769.909384] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:91:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2de06f63-3449-4e6e-af95-5835f882045b', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.919512] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating folder: Project 
(b19293a423174c20963c000441db100e). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 769.922106] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cb94925-1f9b-4039-95b7-cea9b4fa00e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.937707] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created folder: Project (b19293a423174c20963c000441db100e) in parent group-v941773. [ 769.938468] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating folder: Instances. Parent ref: group-v941925. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 769.940088] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90fefda3-6ba6-4c6f-969e-05b7f07524da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.955756] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created folder: Instances in parent group-v941925. [ 769.956767] env[62914]: DEBUG oslo.service.loopingcall [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.956767] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e061304c-998b-4331-b60d-809916844a6f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 769.956767] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-669a9185-dd18-45f5-8d3f-45e271236e43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.982275] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.982275] env[62914]: value = "task-4831814" [ 769.982275] env[62914]: _type = "Task" [ 769.982275] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.998655] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831814, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.103953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de1b5fa7-be04-4d46-b709-1751e1df2421 tempest-ListServersNegativeTestJSON-838840043 tempest-ListServersNegativeTestJSON-838840043-project-member] Lock "1fa01184-1ed2-43de-bcbf-bd8658acc9f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.965s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.232992] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831810, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.305761] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831811, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.386782] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updated VIF entry in instance network info cache for port c68776d2-73ad-4ec2-b114-31f5878098d8. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 770.387152] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updating instance_info_cache with network_info: [{"id": "c68776d2-73ad-4ec2-b114-31f5878098d8", "address": "fa:16:3e:20:21:c8", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68776d2-73", "ovs_interfaceid": "c68776d2-73ad-4ec2-b114-31f5878098d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.395795] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80925} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.397209] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1fb67ac1-c0b7-48b9-8562-d457d46709bc/1fb67ac1-c0b7-48b9-8562-d457d46709bc.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 770.397209] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.397209] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d8db2aa-6f67-46fc-a014-961b1889ed1e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.404924] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 770.404924] env[62914]: value = "task-4831815" [ 770.404924] env[62914]: _type = "Task" [ 770.404924] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.416517] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831815, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.492977] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831814, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.683808] env[62914]: DEBUG nova.network.neutron [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Updated VIF entry in instance network info cache for port 2de06f63-3449-4e6e-af95-5835f882045b. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 770.684509] env[62914]: DEBUG nova.network.neutron [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Updating instance_info_cache with network_info: [{"id": "2de06f63-3449-4e6e-af95-5835f882045b", "address": "fa:16:3e:32:91:c9", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de06f63-34", "ovs_interfaceid": "2de06f63-3449-4e6e-af95-5835f882045b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.693925] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "968cbfbe-1570-48d6-890d-c7a680855574" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.694215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "968cbfbe-1570-48d6-890d-c7a680855574" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.694442] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "968cbfbe-1570-48d6-890d-c7a680855574-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.694633] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "968cbfbe-1570-48d6-890d-c7a680855574-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.694807] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 
tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "968cbfbe-1570-48d6-890d-c7a680855574-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.699877] env[62914]: INFO nova.compute.manager [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Terminating instance [ 770.701834] env[62914]: DEBUG nova.compute.manager [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 770.702056] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 770.702891] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658fa96d-d236-4569-948f-a4fbae22fe43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.713854] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 770.714163] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-379d1694-a43d-410d-a256-b9a660bc1e58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.727080] env[62914]: DEBUG oslo_vmware.api [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 770.727080] env[62914]: value = "task-4831816" [ 770.727080] env[62914]: _type = "Task" [ 770.727080] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.734845] env[62914]: DEBUG oslo_vmware.api [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Task: {'id': task-4831810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.796916} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.735521] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.735742] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 770.735960] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 770.736124] env[62914]: INFO nova.compute.manager [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] [instance: 4496a977-30b2-4323-a561-884633958cdf] Took 1.65 seconds to destroy the instance on the hypervisor. [ 770.736370] env[62914]: DEBUG oslo.service.loopingcall [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.736569] env[62914]: DEBUG nova.compute.manager [-] [instance: 4496a977-30b2-4323-a561-884633958cdf] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 770.736680] env[62914]: DEBUG nova.network.neutron [-] [instance: 4496a977-30b2-4323-a561-884633958cdf] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 770.741519] env[62914]: DEBUG oslo_vmware.api [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.812151] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831811, 'name': Rename_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.890172] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Releasing lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.890648] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received event network-vif-deleted-04bf9072-5af6-410e-bc35-bcd17631d744 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 770.890906] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Received event network-changed-c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 770.891561] env[62914]: DEBUG nova.compute.manager [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Refreshing instance network info cache due to event network-changed-c68776d2-73ad-4ec2-b114-31f5878098d8. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 770.891561] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquiring lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.891561] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Acquired lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.891875] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Refreshing network info cache for port c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 770.920165] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831815, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070181} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.920576] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.921955] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6590a0ff-8387-48a8-ad86-ade3c4a15d3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.952426] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 1fb67ac1-c0b7-48b9-8562-d457d46709bc/1fb67ac1-c0b7-48b9-8562-d457d46709bc.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.956950] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86eea0cd-e8e9-4071-9470-c3dafef0c16b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.977596] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 770.977596] env[62914]: value = "task-4831817" [ 770.977596] env[62914]: _type = "Task" [ 770.977596] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.987337] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831817, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.999267] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831814, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.115782] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cdd70b-40f8-47ec-93ac-6bc0aeda6d68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.123998] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0518b67f-df15-4ee0-8210-72eebb029539 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.166202] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66a0b61-1f08-44ed-8cd4-6cebcf0304fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.175388] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00636378-797a-476c-ad9a-92c6e96b0241 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.190877] env[62914]: DEBUG oslo_concurrency.lockutils [req-803a4e17-d90e-43ac-a761-865b19322ad2 req-ed30c2f8-c0f1-4f31-8157-eb2b7f2e2a7c service nova] Releasing lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.191529] env[62914]: DEBUG nova.compute.provider_tree [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.247379] env[62914]: DEBUG oslo_vmware.api [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831816, 'name': PowerOffVM_Task, 'duration_secs': 0.265172} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.247778] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 771.247964] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 771.248265] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-147aa407-0660-4ba2-85ae-2bcc5315bee6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.310406] env[62914]: DEBUG nova.compute.manager [req-64a0430a-29c8-4f93-a2f9-731c1b58e698 req-cc8a7d8d-4b0d-43f1-9563-a3f5e4bf224f service nova] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Received event network-vif-deleted-e8918472-71f8-4ab8-ae0e-d5333ff21e08 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 771.316921] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831811, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.336853] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 771.336853] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 771.337381] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleting the datastore file [datastore2] 968cbfbe-1570-48d6-890d-c7a680855574 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.338380] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e925eb2-d1d7-470e-a2c9-84e2b8c0f3d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.350669] env[62914]: DEBUG oslo_vmware.api [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 771.350669] env[62914]: value = "task-4831819" [ 771.350669] env[62914]: _type = "Task" [ 771.350669] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.363205] env[62914]: DEBUG oslo_vmware.api [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831819, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.458978] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "397c5401-a435-4170-b07d-a03488c73867" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.459305] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.495331] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.504140] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831814, 'name': CreateVM_Task, 'duration_secs': 1.420403} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.504507] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e061304c-998b-4331-b60d-809916844a6f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 771.505463] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.505763] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.506279] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 771.506638] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63e48ff9-105a-42a8-addf-8b3b6fbe873f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.512922] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 771.512922] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529d1e17-6c18-eff5-f529-e3939a357244" [ 771.512922] env[62914]: _type = "Task" [ 771.512922] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.526227] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529d1e17-6c18-eff5-f529-e3939a357244, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.694829] env[62914]: DEBUG nova.scheduler.client.report [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.723986] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updated VIF entry in instance network info cache for port c68776d2-73ad-4ec2-b114-31f5878098d8. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 771.724574] env[62914]: DEBUG nova.network.neutron [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updating instance_info_cache with network_info: [{"id": "c68776d2-73ad-4ec2-b114-31f5878098d8", "address": "fa:16:3e:20:21:c8", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68776d2-73", "ovs_interfaceid": "c68776d2-73ad-4ec2-b114-31f5878098d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.790318] env[62914]: DEBUG nova.network.neutron [-] [instance: 4496a977-30b2-4323-a561-884633958cdf] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.815446] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831811, 'name': Rename_Task, 'duration_secs': 1.534336} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.815446] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 771.815446] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8d7152c-b94a-490c-aa71-1f8be51265d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.824101] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 771.824101] env[62914]: value = "task-4831820" [ 771.824101] env[62914]: _type = "Task" [ 771.824101] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.837652] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831820, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.864029] env[62914]: DEBUG oslo_vmware.api [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4831819, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241266} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.864029] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 771.864029] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 771.864029] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 771.864029] env[62914]: INFO nova.compute.manager [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Took 1.16 seconds to destroy the instance on the hypervisor. 
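[Editor's note] The recurring "Waiting for the task: (returnval){ value = "task-..." } to complete", "_poll_task ... progress is N%" and "completed successfully" entries above are the visible side of a poll-until-done loop; the oslo.service.loopingcall entries in this log show the same mechanism being used for network deallocation retries. Below is a minimal, self-contained sketch of that pattern using oslo.service's FixedIntervalLoopingCall. It assumes only that oslo.service is installed; FakeTask and wait_for_task are illustrative stand-ins, not Nova or oslo.vmware code.

from oslo_service import loopingcall


class FakeTask(object):
    """Illustrative stand-in for a remote (e.g. vCenter) task reference."""

    def __init__(self):
        self.progress = 0

    def poll(self):
        # Pretend each poll moves the remote task forward.
        self.progress = min(self.progress + 33, 100)
        return self.progress


def wait_for_task(task, interval=0.5):
    def _poll():
        progress = task.poll()
        print("progress is %d%%" % progress)
        if progress >= 100:
            # Raising LoopingCallDone stops the loop; its retvalue is
            # what .wait() below returns to the caller.
            raise loopingcall.LoopingCallDone(retvalue="completed successfully")

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()


print(wait_for_task(FakeTask()))

Here timer.start(interval).wait() blocks until _poll raises LoopingCallDone, which corresponds to the gap between the "progress is N%" lines and the final "completed successfully" line in the log.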
[ 771.864029] env[62914]: DEBUG oslo.service.loopingcall [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.864029] env[62914]: DEBUG nova.compute.manager [-] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 771.864029] env[62914]: DEBUG nova.network.neutron [-] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 771.996917] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831817, 'name': ReconfigVM_Task, 'duration_secs': 0.931167} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.000353] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 1fb67ac1-c0b7-48b9-8562-d457d46709bc/1fb67ac1-c0b7-48b9-8562-d457d46709bc.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 772.001367] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c0d15b7-3296-4512-aa8c-ff6f29b72973 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.008803] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 772.008803] env[62914]: value = "task-4831821" [ 772.008803] env[62914]: _type = "Task" [ 772.008803] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.019248] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831821, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.028823] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529d1e17-6c18-eff5-f529-e3939a357244, 'name': SearchDatastore_Task, 'duration_secs': 0.01287} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.029220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.029513] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.029836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.030048] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.030359] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.030588] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83192a43-d7a5-4d57-ac4b-4de3986c1e35 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.040557] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.040557] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 772.041308] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a14662a-5121-42db-9012-c77427a839ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.048678] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 772.048678] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f39e03-4c86-3604-d00b-4fa532a5de46" [ 772.048678] env[62914]: _type = "Task" [ 772.048678] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.058947] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f39e03-4c86-3604-d00b-4fa532a5de46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.203274] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.203766] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 772.207856] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.434s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.208095] env[62914]: DEBUG nova.objects.instance [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lazy-loading 'resources' on Instance uuid 1342d15d-fbef-4709-adf6-f827bc13d3ca {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 772.228192] env[62914]: DEBUG oslo_concurrency.lockutils [req-6cc0477c-aab2-4a4c-837f-8567884129b6 req-2c899f18-4b3f-440e-b284-6e4510a4c4d8 service nova] Releasing lock "refresh_cache-29a177e4-b5d7-4249-8fc5-2316f6891536" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.294524] env[62914]: INFO nova.compute.manager [-] [instance: 4496a977-30b2-4323-a561-884633958cdf] Took 1.56 seconds to deallocate network for instance. 
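[Editor's note] The lock bookkeeping throughout this log ("Acquiring lock ... by ...", "Lock ... acquired by ... :: waited 0.000s", "Lock ... "released" by ... :: held N.NNNs" from lockutils.py:402/407/421, and the plain "Acquiring/Acquired/Releasing lock" lines from lockutils.py:310/313/331) is emitted by oslo.concurrency's lockutils. Below is a minimal sketch of both forms, assuming only that oslo.concurrency is installed; the lock name and function are illustrative placeholders, not Nova code.

from oslo_concurrency import lockutils


@lockutils.synchronized("refresh_cache-<instance-uuid>")
def refresh_network_info_cache():
    # Only one thread in this process runs this body at a time for the
    # given lock name; the decorator's inner wrapper logs the
    # "acquired ... waited" / "released ... held" lines at DEBUG level.
    pass


refresh_network_info_cache()

# The same named lock can also be taken explicitly as a context manager,
# which produces the plain "Acquiring lock ... / Releasing lock ..." entries.
with lockutils.lock("refresh_cache-<instance-uuid>"):
    refresh_needed = False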
[ 772.337681] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831820, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.522358] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831821, 'name': Rename_Task, 'duration_secs': 0.23429} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.522659] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 772.522922] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba40eb46-983a-4bfa-b2c0-6e792b1f8ff7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.532154] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 772.532154] env[62914]: value = "task-4831822" [ 772.532154] env[62914]: _type = "Task" [ 772.532154] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.542094] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831822, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.561027] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f39e03-4c86-3604-d00b-4fa532a5de46, 'name': SearchDatastore_Task, 'duration_secs': 0.010709} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.561930] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6ca6c23-13f7-4e69-8b3e-f2acbf63d5e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.570202] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 772.570202] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52449564-0445-89ac-0a03-a3281dc00e4f" [ 772.570202] env[62914]: _type = "Task" [ 772.570202] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.580538] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52449564-0445-89ac-0a03-a3281dc00e4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.590152] env[62914]: DEBUG nova.compute.manager [req-f6ee12bf-5e8b-4361-9685-f939ebe78859 req-cefe4d87-e2be-4f46-b494-eec4cf8f0b1c service nova] [instance: 4496a977-30b2-4323-a561-884633958cdf] Received event network-vif-deleted-8eb60abb-6f7b-4b7e-b102-0d1d2fd78e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 772.708800] env[62914]: DEBUG nova.network.neutron [-] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.716472] env[62914]: DEBUG nova.compute.utils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.719480] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 772.719480] env[62914]: DEBUG nova.network.neutron [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 772.775022] env[62914]: DEBUG nova.policy [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec5db4449cd54254b8effdd90bbac248', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2cb4ab64c41d4df2b72b26c54a0bdccb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 772.809383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.840190] env[62914]: DEBUG oslo_vmware.api [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 
tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831820, 'name': PowerOnVM_Task, 'duration_secs': 0.797471} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.840258] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 772.840476] env[62914]: INFO nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Took 11.35 seconds to spawn the instance on the hypervisor. [ 772.840658] env[62914]: DEBUG nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 772.841581] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73930179-5ab8-486a-908e-ab84b3a76860 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.044870] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831822, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.077102] env[62914]: DEBUG nova.network.neutron [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Successfully created port: c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.084477] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52449564-0445-89ac-0a03-a3281dc00e4f, 'name': SearchDatastore_Task, 'duration_secs': 0.018147} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.088291] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.088749] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e061304c-998b-4331-b60d-809916844a6f/e061304c-998b-4331-b60d-809916844a6f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 773.089362] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2269b978-e927-44b8-bbb8-623fc30f5f4c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.099804] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 773.099804] env[62914]: value = "task-4831823" [ 773.099804] env[62914]: _type = "Task" [ 773.099804] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.114026] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.214819] env[62914]: INFO nova.compute.manager [-] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Took 1.35 seconds to deallocate network for instance. [ 773.219761] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 773.368120] env[62914]: INFO nova.compute.manager [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Took 36.31 seconds to build instance. 
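The Rename_Task, PowerOnVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task entries in this stretch all follow the same shape: submit a vCenter task, then poll it, logging "progress is N%" until it completes with a duration_secs. The sketch below approximates that polling loop with the standard library only; FakeTask and the fixed poll interval are assumptions, and this is not the oslo.vmware wait_for_task implementation.

import time

class FakeTask:
    """Toy stand-in for a vCenter task handle (an assumption, not the real object)."""
    def __init__(self, name, polls_to_finish=3):
        self.name = name
        self._remaining = polls_to_finish
        self.progress = 0

    def refresh(self):
        # Each poll moves the fake task forward; the real driver re-reads the task
        # state from vCenter here instead.
        self._remaining -= 1
        self.progress = min(100, self.progress + 34)
        return "success" if self._remaining <= 0 else "running"

def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it finishes, mirroring the progress/duration_secs lines."""
    start = time.monotonic()
    while True:
        state = task.refresh()
        if state == "running":
            print(f"Task: {{'name': {task.name!r}}} progress is {task.progress}%.")
            time.sleep(poll_interval)
            continue
        if state == "success":
            duration = round(time.monotonic() - start, 6)
            print(f"Task: {{'name': {task.name!r}, 'duration_secs': {duration}}} "
                  "completed successfully.")
            return
        raise RuntimeError(f"Task {task.name} failed")

wait_for_task(FakeTask("SearchDatastore_Task"))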
[ 773.400057] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5108cf-18aa-4a41-99ed-2f41cfce946c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.408891] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f503865-7b91-47b0-a7bb-619e783c53c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.450938] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198b8c8e-411b-4662-ad2b-178495a717b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.460574] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5156835c-418d-4f56-8a6d-1b789c0b8d0f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.484060] env[62914]: DEBUG nova.compute.provider_tree [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.549883] env[62914]: DEBUG oslo_vmware.api [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831822, 'name': PowerOnVM_Task, 'duration_secs': 0.640874} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.549883] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 773.550062] env[62914]: INFO nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Took 9.23 seconds to spawn the instance on the hypervisor. [ 773.550200] env[62914]: DEBUG nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 773.551315] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8694f7b-9342-486c-85e6-cf2d7ba782f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.616888] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831823, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.732618] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.876045] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6196a21-2de9-472d-bb15-84b0314f97fd tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "2d48056c-d38f-4be1-b28b-71da14607870" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.214s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.989227] env[62914]: DEBUG nova.scheduler.client.report [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.076385] env[62914]: INFO nova.compute.manager [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Took 35.53 seconds to build instance. [ 774.116977] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660362} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.117355] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e061304c-998b-4331-b60d-809916844a6f/e061304c-998b-4331-b60d-809916844a6f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 774.117847] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 774.118908] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46aa584e-deb3-4909-90bf-90ccd78cef8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.127663] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 774.127663] env[62914]: value = "task-4831824" [ 774.127663] env[62914]: _type = "Task" [ 774.127663] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.149784] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831824, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.236051] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 774.271122] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 774.271513] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 774.271750] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.272056] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 774.272283] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.272501] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 774.272798] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 774.273043] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 774.273294] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 774.273672] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 774.273937] env[62914]: DEBUG nova.virt.hardware [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 774.275337] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5a3c0b-4cb1-4c21-b2f6-9c8ad3fa44f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.286891] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd926428-89f1-4a94-b917-bfebf2fff8b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.382822] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 774.493561] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.286s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.495938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.191s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.497925] env[62914]: INFO nova.compute.claims [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.537718] env[62914]: INFO nova.scheduler.client.report [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Deleted allocations for instance 1342d15d-fbef-4709-adf6-f827bc13d3ca [ 774.578737] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d9ea4721-1382-4a1d-8612-323f5a21db01 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.042s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.642052] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831824, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097721} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.642052] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.642524] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5d8f56-facf-448e-be2c-0de4bd5cebb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.676190] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] e061304c-998b-4331-b60d-809916844a6f/e061304c-998b-4331-b60d-809916844a6f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.676951] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04999552-8dbf-4a37-8ee6-2041040e5a95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.700770] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 774.700770] env[62914]: value = "task-4831825" [ 774.700770] env[62914]: _type = "Task" [ 774.700770] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.711818] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831825, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.914295] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.044207] env[62914]: DEBUG nova.network.neutron [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Successfully updated port: c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.049236] env[62914]: DEBUG oslo_concurrency.lockutils [None req-48fe2c69-3f29-4ec6-8ada-1d74cc78d7fd tempest-ServerGroupTestJSON-730209564 tempest-ServerGroupTestJSON-730209564-project-member] Lock "1342d15d-fbef-4709-adf6-f827bc13d3ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.801s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.084524] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 775.156678] env[62914]: DEBUG nova.compute.manager [req-78fdb6c0-5576-49bc-a321-67fa511e0e9a req-c07e01b4-d895-4b49-a9a0-af1935821759 service nova] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Received event network-vif-deleted-392f2779-6bcf-4d28-9f9b-bd4279812dc7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 775.215773] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831825, 'name': ReconfigVM_Task, 'duration_secs': 0.444661} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.216420] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Reconfigured VM instance instance-00000032 to attach disk [datastore2] e061304c-998b-4331-b60d-809916844a6f/e061304c-998b-4331-b60d-809916844a6f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.217253] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ddfbcbd-4d75-484a-a211-4ab4245797ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.223958] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 775.223958] env[62914]: value = "task-4831826" [ 775.223958] env[62914]: _type = "Task" [ 775.223958] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.232991] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831826, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.548601] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.549313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.549313] env[62914]: DEBUG nova.network.neutron [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 775.635156] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.738254] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831826, 'name': Rename_Task, 'duration_secs': 0.144839} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.738836] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 775.738836] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e811aa5f-65f9-497d-bc4a-5690c89fd6a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.749517] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 775.749517] env[62914]: value = "task-4831827" [ 775.749517] env[62914]: _type = "Task" [ 775.749517] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.758872] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.118499] env[62914]: DEBUG nova.network.neutron [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.148020] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3cdc5e-4688-4bf4-9a40-19181291c06b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.156056] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070eb41d-ee96-4751-8fd0-22727e951eff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.198818] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba2ff15-7fa6-4656-8ff3-b6bd59febf52 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.215022] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ceea20-6c3c-46ac-afa1-f1c29d15dae7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.234134] env[62914]: DEBUG nova.compute.provider_tree [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.261383] env[62914]: DEBUG oslo_vmware.api [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831827, 'name': PowerOnVM_Task, 'duration_secs': 0.48924} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.261692] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 776.261905] env[62914]: INFO nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Took 9.12 seconds to spawn the instance on the hypervisor. 
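The nova.virt.hardware entries a few lines above walk through CPU topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits (so the 65536 maximums apply), a single possible topology 1:1:1, which is then chosen. The function below enumerates sockets:cores:threads factorisations for a given vCPU count and limits; it is a simplified illustration of that step, not Nova's _get_possible_cpu_topologies.

from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """All sockets*cores*threads factorisations of vcpus within the given limits."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                topos.append(Topology(sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)], as in the log
print(possible_topologies(4))   # 1:1:4, 1:2:2, 1:4:1, 2:1:2, 2:2:1, 4:1:1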
[ 776.262110] env[62914]: DEBUG nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 776.262911] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5cae17-af3c-4a9d-928f-8f752eb15b1c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.377246] env[62914]: DEBUG nova.network.neutron [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.737504] env[62914]: DEBUG nova.scheduler.client.report [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.790669] env[62914]: INFO nova.compute.manager [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Took 37.54 seconds to build instance. 
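The "Inventory has not changed for provider ..." entries dump the inventory this node reports to Placement. Under standard Placement semantics the allocatable capacity per resource class is (total - reserved) * allocation_ratio, so the 48 VCPU at ratio 4.0 become 192 allocatable VCPU. The snippet below recomputes those numbers from the logged inventory dict; the formula is generic Placement behaviour, not something specific to this deployment.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0,
                  "min_unit": 1, "max_unit": 16,    "step_size": 1},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                  "min_unit": 1, "max_unit": 65530, "step_size": 1},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0,
                  "min_unit": 1, "max_unit": 95,    "step_size": 1},
}

def allocatable(inv):
    """Capacity Placement will hand out: (total - reserved) * allocation_ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

for rc, cap in allocatable(inventory).items():
    print(f"{rc}: {cap:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200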
[ 776.881257] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.881257] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Instance network_info: |[{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 776.881257] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:4b:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba0caf51-f398-43a4-b2b3-f53480254d5f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2f62cb3-f405-432b-9d8c-8c08ea54e240', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.887915] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Creating folder: Project (2cb4ab64c41d4df2b72b26c54a0bdccb). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 776.888408] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f12c981b-f834-42eb-a1c5-bc0b7e4a16f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.902218] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Created folder: Project (2cb4ab64c41d4df2b72b26c54a0bdccb) in parent group-v941773. 
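The network_info blob above describes the OVS VIF Neutron bound for port c2f62cb3-...: MAC fa:16:3e:f7:4b:8f, fixed IP 192.168.128.6 on a /28, devname tapc2f62cb3-f4, NSX segment 667, which the driver then turns into the VIF info used to build the VM. The helper below pulls the commonly needed fields out of a structure shaped like that entry; it assumes exactly the layout shown in the log and nothing more.

def summarize_vif(vif):
    """Extract MAC, fixed IPs and the OVS interface id from one network_info entry."""
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": vif["network"]["bridge"],
        "fixed_ips": ips,
        "ovs_interfaceid": vif.get("ovs_interfaceid"),
    }

network_info = [{
    "id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240",
    "address": "fa:16:3e:f7:4b:8f",
    "network": {
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.6", "type": "fixed"}]}],
    },
    "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240",
}]

print(summarize_vif(network_info[0]))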
[ 776.902218] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Creating folder: Instances. Parent ref: group-v941928. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 776.902218] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d546abd0-64d2-4b06-aebf-217a917edb54 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.913745] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Created folder: Instances in parent group-v941928. [ 776.914640] env[62914]: DEBUG oslo.service.loopingcall [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.914640] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 776.916219] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-209d25d1-9e9b-4b0e-a34a-ed458039b91a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.943324] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.943324] env[62914]: value = "task-4831830" [ 776.943324] env[62914]: _type = "Task" [ 776.943324] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.952647] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831830, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.243996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.244735] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 777.248038] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.607s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.248309] env[62914]: DEBUG nova.objects.instance [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lazy-loading 'resources' on Instance uuid 54185b06-7ccb-4740-a6ee-213bbfa6365b {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.293066] env[62914]: DEBUG oslo_concurrency.lockutils [None req-603ac7c5-d35f-42d7-a8f3-ee2766cc6eff tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "e061304c-998b-4331-b60d-809916844a6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.057s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.459074] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831830, 'name': CreateVM_Task, 'duration_secs': 0.404948} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.459274] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 777.460018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.460240] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.460564] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 777.460831] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6828b23-5305-45e5-8bbb-110197e5b992 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.468207] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 
tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 777.468207] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff5e28-6529-886f-4838-489eef00191d" [ 777.468207] env[62914]: _type = "Task" [ 777.468207] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.477360] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff5e28-6529-886f-4838-489eef00191d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.541687] env[62914]: DEBUG nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-vif-plugged-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 777.542538] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Acquiring lock "1d74504f-b641-42c6-a420-c80614d69b23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.543227] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Lock "1d74504f-b641-42c6-a420-c80614d69b23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.543617] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Lock "1d74504f-b641-42c6-a420-c80614d69b23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.544071] env[62914]: DEBUG nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] No waiting events found dispatching network-vif-plugged-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 777.545281] env[62914]: WARNING nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received unexpected event network-vif-plugged-c2f62cb3-f405-432b-9d8c-8c08ea54e240 for instance with vm_state building and task_state spawning. 
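The network-vif-plugged lines at the end of this block show the external-event path: Neutron notifies Nova, the compute manager takes the per-instance "<uuid>-events" lock, finds no registered waiter because the instance is still building, and logs the "Received unexpected event" warning. The sketch below reproduces that pop-or-warn pattern with a plain dict and a threading.Lock; EventDispatcher and its method names are invented for illustration and are not the compute manager's actual interface.

import threading

class EventDispatcher:
    """Toy per-instance event table mirroring the pop_instance_event pattern."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_key) -> callback

    def prepare_for_event(self, instance_uuid, event_key, callback):
        with self._lock:
            self._waiters[(instance_uuid, event_key)] = callback

    def dispatch(self, instance_uuid, event_key):
        with self._lock:
            callback = self._waiters.pop((instance_uuid, event_key), None)
        if callback is None:
            print(f"WARNING: Received unexpected event {event_key} "
                  f"for instance {instance_uuid}")
        else:
            callback()

d = EventDispatcher()
# No waiter registered yet, just like the still-building instance in the log:
d.dispatch("1d74504f-b641-42c6-a420-c80614d69b23",
           "network-vif-plugged-c2f62cb3-f405-432b-9d8c-8c08ea54e240")
# With a waiter registered first, the event is consumed instead of warned about:
d.prepare_for_event("1d74504f", "network-vif-plugged-x", lambda: print("plugged"))
d.dispatch("1d74504f", "network-vif-plugged-x")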
[ 777.546653] env[62914]: DEBUG nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 777.546653] env[62914]: DEBUG nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing instance network info cache due to event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 777.546653] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.546653] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.546653] env[62914]: DEBUG nova.network.neutron [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 777.751556] env[62914]: DEBUG nova.compute.utils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 777.758372] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 777.759035] env[62914]: DEBUG nova.network.neutron [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 777.795949] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 777.826194] env[62914]: DEBUG nova.policy [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '60ed60579cd74494959b0a7f306f2832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '319610053c8a4ca19dcb0c0b3e6b6596', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 777.982279] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff5e28-6529-886f-4838-489eef00191d, 'name': SearchDatastore_Task, 'duration_secs': 0.01273} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.985120] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.985378] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.986039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.986039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.986039] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.987154] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b4acb7c4-4bc8-41b8-b0ba-943acec84976 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.002697] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.002927] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 778.004288] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1b50f12-3d0c-4a5c-a514-5e468dfa247b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.010050] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 778.010050] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527331d3-8671-1bbb-f461-46a50678c75f" [ 778.010050] env[62914]: _type = "Task" [ 778.010050] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.023680] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527331d3-8671-1bbb-f461-46a50678c75f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.255907] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 778.284020] env[62914]: DEBUG nova.network.neutron [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Successfully created port: 08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.329113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.331698] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd16b618-d1eb-4025-9c00-911777420ca5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.339781] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b392957-0931-449f-8e42-11042e84ebb1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.387034] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb7f715-282f-4121-83b4-5ae054439885 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.394442] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2b1ce0-6613-4e73-b8ab-1cbf27407d79 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.409517] env[62914]: DEBUG nova.compute.provider_tree [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.497983] env[62914]: DEBUG nova.network.neutron [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updated VIF entry in instance network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 778.497983] env[62914]: DEBUG nova.network.neutron [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.525547] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527331d3-8671-1bbb-f461-46a50678c75f, 'name': SearchDatastore_Task, 'duration_secs': 0.024548} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.526511] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e86dce8e-d776-47f2-970a-3aab71c0fdb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.535046] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 778.535046] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258929f-6c07-b8ea-ff1d-a325edb38c50" [ 778.535046] env[62914]: _type = "Task" [ 778.535046] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.551527] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258929f-6c07-b8ea-ff1d-a325edb38c50, 'name': SearchDatastore_Task, 'duration_secs': 0.011586} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.551527] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.551666] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/1d74504f-b641-42c6-a420-c80614d69b23.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 778.554148] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-499a4819-115a-4984-bca9-8fedfa6f871e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.566346] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 778.566346] env[62914]: value = "task-4831831" [ 778.566346] env[62914]: _type = "Task" [ 778.566346] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.576304] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831831, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.915243] env[62914]: DEBUG nova.scheduler.client.report [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.001351] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.001758] env[62914]: DEBUG nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-changed-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 779.001758] env[62914]: DEBUG nova.compute.manager [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing instance network info cache due to event network-changed-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 779.001906] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.002119] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.002341] env[62914]: DEBUG nova.network.neutron [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing network info cache for port 06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 779.077194] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831831, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.177136] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b965ce9c-53eb-4802-a5a7-f29c2c1b74ec tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "06c7a7df-05b0-4ed3-a574-65991f1d3aec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.179043] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b965ce9c-53eb-4802-a5a7-f29c2c1b74ec tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "06c7a7df-05b0-4ed3-a574-65991f1d3aec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.272860] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 779.313754] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 779.315475] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 779.315475] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.315475] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 779.315475] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 
tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.315475] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 779.315475] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 779.315899] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 779.315899] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 779.315899] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 779.316639] env[62914]: DEBUG nova.virt.hardware [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 779.319297] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57ae08b-f24b-4753-9f81-3a25feac4b9b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.331749] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb088d0-fe8d-49b3-a673-2ab34b3dee04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.421020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.423769] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.877s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.424819] env[62914]: DEBUG nova.objects.instance [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lazy-loading 'resources' on Instance uuid 12aa02f0-a232-427a-80ba-1faa12c4d43a {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 779.453917] env[62914]: INFO nova.scheduler.client.report [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Deleted allocations for instance 54185b06-7ccb-4740-a6ee-213bbfa6365b [ 779.580594] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526805} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.581166] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/1d74504f-b641-42c6-a420-c80614d69b23.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 779.581166] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.581455] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8e2f919-bbc9-404d-9ff1-f6d14cc5876d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.590078] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 779.590078] env[62914]: value = "task-4831832" [ 779.590078] env[62914]: _type = "Task" [ 779.590078] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.605459] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831832, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.967535] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3917d2a9-bc74-4a28-a611-872fdc809f3e tempest-ServersWithSpecificFlavorTestJSON-184799251 tempest-ServersWithSpecificFlavorTestJSON-184799251-project-member] Lock "54185b06-7ccb-4740-a6ee-213bbfa6365b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.433s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.115679] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122545} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.116112] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.116927] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40878db-0fc3-49b0-b0c3-b428e3c47b4b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.145743] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/1d74504f-b641-42c6-a420-c80614d69b23.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.150019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f126b9bf-d7a1-40b6-ad87-329fc11e2cb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.175528] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 780.175528] env[62914]: value = "task-4831833" [ 780.175528] env[62914]: _type = "Task" [ 780.175528] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.191083] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831833, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.212501] env[62914]: DEBUG nova.network.neutron [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Successfully updated port: 08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.503202] env[62914]: DEBUG nova.network.neutron [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updated VIF entry in instance network info cache for port 06e36426-302a-4bcd-bb7a-f9d6dd3a72c2. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 780.503980] env[62914]: DEBUG nova.network.neutron [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.638850] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1426655f-067b-4d29-9b34-33f7554c3161 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.649897] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37e3620-5a5a-4415-bfc1-938fe3a70f57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.694104] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d51c90-a793-4694-a906-947b08acc8c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.707246] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0c7c95-9953-4612-9c28-8f656123600b {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.711964] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831833, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.726599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "refresh_cache-b477cd62-49c2-4e3c-98ea-b4154dda4986" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.726599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "refresh_cache-b477cd62-49c2-4e3c-98ea-b4154dda4986" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.726599] env[62914]: DEBUG nova.network.neutron [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 780.726981] env[62914]: DEBUG nova.compute.provider_tree [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.008875] env[62914]: DEBUG oslo_concurrency.lockutils [req-029c7bc0-52c0-45c1-8660-e05365c55aca req-f5d1242c-ed66-4218-b7a0-39dbd68b1c47 service nova] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.202977] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831833, 'name': ReconfigVM_Task, 'duration_secs': 0.674257} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.207899] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/1d74504f-b641-42c6-a420-c80614d69b23.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.208970] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27d56d02-488b-4264-a489-d56bb1ad8d83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.223042] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 781.223042] env[62914]: value = "task-4831834" [ 781.223042] env[62914]: _type = "Task" [ 781.223042] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.234415] env[62914]: DEBUG nova.scheduler.client.report [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.244751] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831834, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.298787] env[62914]: DEBUG nova.compute.manager [req-a5f6017e-83f6-47e4-8e25-cbde416022fd req-307874ec-d6f1-4c6d-a623-465b07090df9 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Received event network-vif-plugged-08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 781.300074] env[62914]: DEBUG oslo_concurrency.lockutils [req-a5f6017e-83f6-47e4-8e25-cbde416022fd req-307874ec-d6f1-4c6d-a623-465b07090df9 service nova] Acquiring lock "b477cd62-49c2-4e3c-98ea-b4154dda4986-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.300477] env[62914]: DEBUG oslo_concurrency.lockutils [req-a5f6017e-83f6-47e4-8e25-cbde416022fd req-307874ec-d6f1-4c6d-a623-465b07090df9 service nova] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.300736] env[62914]: DEBUG oslo_concurrency.lockutils [req-a5f6017e-83f6-47e4-8e25-cbde416022fd req-307874ec-d6f1-4c6d-a623-465b07090df9 service nova] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.300988] env[62914]: DEBUG nova.compute.manager [req-a5f6017e-83f6-47e4-8e25-cbde416022fd req-307874ec-d6f1-4c6d-a623-465b07090df9 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] No waiting events found dispatching network-vif-plugged-08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 781.301440] env[62914]: WARNING nova.compute.manager [req-a5f6017e-83f6-47e4-8e25-cbde416022fd req-307874ec-d6f1-4c6d-a623-465b07090df9 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Received unexpected event network-vif-plugged-08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 for instance with vm_state building and task_state spawning. [ 781.314122] env[62914]: DEBUG nova.network.neutron [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.706477] env[62914]: DEBUG nova.network.neutron [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Updating instance_info_cache with network_info: [{"id": "08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1", "address": "fa:16:3e:27:97:c9", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08e0b9bd-1c", "ovs_interfaceid": "08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.739285] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831834, 'name': Rename_Task, 'duration_secs': 0.193656} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.739848] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 781.740750] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b36d7721-4105-45bf-bd29-f9495b169862 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.748405] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.325s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.751525] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 781.751525] env[62914]: value = "task-4831835" [ 781.751525] env[62914]: _type = "Task" [ 781.751525] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.752299] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.747s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.752299] env[62914]: DEBUG nova.objects.instance [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] [instance: cead3557-080d-4956-a957-cac449bb69f6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 781.765050] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.797724] env[62914]: INFO nova.scheduler.client.report [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted allocations for instance 12aa02f0-a232-427a-80ba-1faa12c4d43a [ 782.006598] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.011099] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.060637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "32e8f18e-2116-43bd-9951-ad809ab95ba2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.061073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.209617] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "refresh_cache-b477cd62-49c2-4e3c-98ea-b4154dda4986" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.210821] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Instance network_info: |[{"id": "08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1", "address": "fa:16:3e:27:97:c9", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08e0b9bd-1c", "ovs_interfaceid": "08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 782.210821] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:97:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8abee039-d93e-48a7-8911-6416a3e1ff30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 782.222844] env[62914]: DEBUG oslo.service.loopingcall [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.224855] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 782.224965] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd3641a4-a381-4da6-8d49-01aafb51f284 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.261415] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 782.261415] env[62914]: value = "task-4831836" [ 782.261415] env[62914]: _type = "Task" [ 782.261415] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.275464] env[62914]: DEBUG oslo_vmware.api [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831835, 'name': PowerOnVM_Task, 'duration_secs': 0.490808} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.279060] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 782.279319] env[62914]: INFO nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Took 8.04 seconds to spawn the instance on the hypervisor. [ 782.279510] env[62914]: DEBUG nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 782.279770] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831836, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.281146] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19024be4-761f-4d03-b4a6-9010f4942111 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.305951] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5239dcd0-c558-43cc-8209-d1eb7a2f5675 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "12aa02f0-a232-427a-80ba-1faa12c4d43a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.103s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.531650] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "982936be-3cb1-4930-b135-8fc2019c5216" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.532625] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "982936be-3cb1-4930-b135-8fc2019c5216" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.765120] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01760891-7d50-452a-b1bd-57867fa6df85 tempest-ServersAdmin275Test-957959395 tempest-ServersAdmin275Test-957959395-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.765120] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.308s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.778684] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831836, 'name': CreateVM_Task, 'duration_secs': 0.439228} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.780882] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 782.781390] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.781702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.782219] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 782.783044] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de1b51e-5f19-4615-8986-410a0645cb3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.790565] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 782.790565] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a3644c-2bd9-b9ed-4e56-cb5c7fb3013b" [ 782.790565] env[62914]: _type = "Task" [ 782.790565] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.802247] env[62914]: INFO nova.compute.manager [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Took 39.38 seconds to build instance. [ 782.806843] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a3644c-2bd9-b9ed-4e56-cb5c7fb3013b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.062438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.062643] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.065283] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.065626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.275394] env[62914]: INFO nova.compute.claims [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.303608] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a3644c-2bd9-b9ed-4e56-cb5c7fb3013b, 'name': SearchDatastore_Task, 'duration_secs': 0.020295} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.303868] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.304146] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.304416] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.304847] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.304847] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.305096] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b72e563-655c-44d5-b342-4fe86115a2f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.311391] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f639da85-5170-4027-bc14-c3a66c485170 tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "1d74504f-b641-42c6-a420-c80614d69b23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.147s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.334812] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.334812] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 783.334812] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-965850a8-3c7e-4b28-89de-896c948f1de4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.340049] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 783.340049] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521ed647-3e2a-67cf-cb0f-b1c16ba8f135" [ 783.340049] env[62914]: _type = "Task" [ 783.340049] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.356122] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521ed647-3e2a-67cf-cb0f-b1c16ba8f135, 'name': SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.357834] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01154921-1c3b-4ec4-b100-3ab319d30574 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.365288] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 783.365288] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bce1d-d033-ff95-370a-de58adcb4a83" [ 783.365288] env[62914]: _type = "Task" [ 783.365288] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.374512] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bce1d-d033-ff95-370a-de58adcb4a83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.664645] env[62914]: DEBUG nova.compute.manager [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Received event network-changed-08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 783.664861] env[62914]: DEBUG nova.compute.manager [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Refreshing instance network info cache due to event network-changed-08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 783.665235] env[62914]: DEBUG oslo_concurrency.lockutils [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] Acquiring lock "refresh_cache-b477cd62-49c2-4e3c-98ea-b4154dda4986" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.665468] env[62914]: DEBUG oslo_concurrency.lockutils [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] Acquired lock "refresh_cache-b477cd62-49c2-4e3c-98ea-b4154dda4986" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.665563] env[62914]: DEBUG nova.network.neutron [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Refreshing network info cache for port 08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 783.783943] env[62914]: INFO nova.compute.resource_tracker [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating resource usage from migration 8e4a4d60-6c1d-42ca-b081-c15b4d2a896c [ 783.817561] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 783.881964] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bce1d-d033-ff95-370a-de58adcb4a83, 'name': SearchDatastore_Task, 'duration_secs': 0.010253} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.881964] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.881964] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] b477cd62-49c2-4e3c-98ea-b4154dda4986/b477cd62-49c2-4e3c-98ea-b4154dda4986.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 783.882907] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a488bd0d-2abd-4c8b-8f67-37b61c4de01d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.893403] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 783.893403] env[62914]: value = "task-4831837" [ 783.893403] env[62914]: _type = "Task" [ 783.893403] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.902687] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831837, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.081144] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.081637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.208550] env[62914]: INFO nova.compute.manager [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Rescuing [ 784.208835] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.208993] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.209191] env[62914]: DEBUG nova.network.neutron [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 784.342757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.406737] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831837, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.424338] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715358d1-3300-4f54-93cc-eb7fe98b97ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.435505] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27de8896-98ac-47b7-a608-8291ef6b2802 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.471911] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b98d50-4e9a-4937-acef-4fe066011feb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.480597] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6c50b2-ae9f-439c-8983-80c4afb0d0aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.495149] env[62914]: DEBUG nova.compute.provider_tree [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.639577] env[62914]: DEBUG nova.network.neutron [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Updated VIF entry in instance network info cache for port 08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 784.639577] env[62914]: DEBUG nova.network.neutron [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Updating instance_info_cache with network_info: [{"id": "08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1", "address": "fa:16:3e:27:97:c9", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08e0b9bd-1c", "ovs_interfaceid": "08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.912025] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532854} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.912025] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] b477cd62-49c2-4e3c-98ea-b4154dda4986/b477cd62-49c2-4e3c-98ea-b4154dda4986.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 784.912025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.912025] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0d33b7d-715e-471c-83b7-06fe21f6be53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.917516] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 784.917516] env[62914]: value = "task-4831838" [ 784.917516] env[62914]: _type = "Task" [ 784.917516] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.928493] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831838, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.000311] env[62914]: DEBUG nova.scheduler.client.report [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 785.143769] env[62914]: DEBUG oslo_concurrency.lockutils [req-ea5c82be-31c1-434b-bba5-221f467239ea req-57f02f2e-6558-460d-8882-012b4d6a2b64 service nova] Releasing lock "refresh_cache-b477cd62-49c2-4e3c-98ea-b4154dda4986" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.233186] env[62914]: DEBUG nova.network.neutron [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.434646] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831838, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071613} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.434646] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 785.434646] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37903879-4a68-472c-ad1f-6df331f5d091 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.462076] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] b477cd62-49c2-4e3c-98ea-b4154dda4986/b477cd62-49c2-4e3c-98ea-b4154dda4986.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.462477] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5644662f-9eb1-4086-a090-0cbfd70ecb9f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.486960] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 785.486960] env[62914]: value = "task-4831839" [ 785.486960] env[62914]: _type = "Task" [ 785.486960] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.497061] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831839, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.507310] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.742s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.507558] env[62914]: INFO nova.compute.manager [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Migrating [ 785.516656] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.133s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.736792] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.001360] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831839, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.027672] env[62914]: INFO nova.compute.claims [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.035110] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.035338] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.035524] env[62914]: DEBUG nova.network.neutron [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.282021] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 786.282021] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc1896c1-aab6-495e-80f6-8bac29f1bc96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.288755] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 786.288755] env[62914]: value = "task-4831840" [ 786.288755] env[62914]: _type = "Task" [ 786.288755] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.300617] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831840, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.502877] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831839, 'name': ReconfigVM_Task, 'duration_secs': 0.83489} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.503506] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Reconfigured VM instance instance-00000034 to attach disk [datastore2] b477cd62-49c2-4e3c-98ea-b4154dda4986/b477cd62-49c2-4e3c-98ea-b4154dda4986.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.504277] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0d37788-af1f-42ab-a861-3368eab25616 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.514097] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 786.514097] env[62914]: value = "task-4831841" [ 786.514097] env[62914]: _type = "Task" [ 786.514097] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.529511] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831841, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.541857] env[62914]: INFO nova.compute.resource_tracker [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating resource usage from migration 5affa2b3-57aa-4caf-b07c-b4616c9bb3c4 [ 786.806604] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831840, 'name': PowerOffVM_Task, 'duration_secs': 0.19817} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.809530] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 786.810819] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c33e7a0-dfda-449c-a36b-1f832086752f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.838872] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5b93e8-9a86-4053-ab13-3373c3d2192e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.886207] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 786.886207] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3762350a-1583-4915-9f3e-876af8c5a66c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.900519] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 786.900519] env[62914]: value = "task-4831842" [ 786.900519] env[62914]: _type = "Task" [ 786.900519] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.913410] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 786.913938] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.914269] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.914430] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.914613] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.915425] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be556e15-59f3-4764-a349-5a0cadabed3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.929025] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.929025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 786.929025] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36f6c342-dfbe-418c-9d37-270a0c8e190a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.939452] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 786.939452] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528bce05-8b29-3c3b-0c25-9b5f0ac014c9" [ 786.939452] env[62914]: _type = "Task" [ 786.939452] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.950033] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528bce05-8b29-3c3b-0c25-9b5f0ac014c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.027031] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831841, 'name': Rename_Task, 'duration_secs': 0.169497} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.027031] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 787.027031] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c115c0a0-bef7-42b1-87aa-908831182caa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.039581] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 787.039581] env[62914]: value = "task-4831843" [ 787.039581] env[62914]: _type = "Task" [ 787.039581] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.055473] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831843, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.148589] env[62914]: DEBUG nova.network.neutron [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.263141] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f6af23-3d4c-425b-9bc9-89ccdd877cd4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.272849] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b5d2fb-bf74-4f2c-a8d0-ae459df205bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.318181] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53c7a5c-52dc-4235-980e-4b8a9b6ee1e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.326785] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24093400-fc7e-4bb8-99b2-01bef75d51f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.341509] env[62914]: DEBUG nova.compute.provider_tree [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.405362] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.405973] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.451822] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528bce05-8b29-3c3b-0c25-9b5f0ac014c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009971} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.452692] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba976214-983b-44a4-a1e5-6771fc395cbd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.459087] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 787.459087] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a7dc68-51fc-d441-49e2-5592ce2a228d" [ 787.459087] env[62914]: _type = "Task" [ 787.459087] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.468190] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a7dc68-51fc-d441-49e2-5592ce2a228d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.562536] env[62914]: DEBUG oslo_vmware.api [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831843, 'name': PowerOnVM_Task, 'duration_secs': 0.495398} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.562938] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 787.563153] env[62914]: INFO nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Took 8.29 seconds to spawn the instance on the hypervisor. 
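The entries above for task-4831837 through task-4831843 trace the spawn of instance b477cd62-49c2-4e3c-98ea-b4154dda4986: the cached image vmdk is copied out of devstack-image-cache_base, the root disk is extended, the VM is reconfigured to attach the disk, renamed, and powered on, with each vCenter task polled to completion (the repeated "progress is N%" lines). Below is a minimal sketch of that poll-until-terminal loop; poll_task_state() is a hypothetical stand-in for reading the task's info.state via the PropertyCollector, and this is not the oslo.vmware wait_for_task implementation.

    # Simplified sketch of the poll-until-done pattern reflected by the
    # wait_for_task/_poll_task entries above. NOT the oslo.vmware code;
    # poll_task_state() is a hypothetical helper returning (state, progress).
    import time

    TERMINAL_STATES = {"success", "error"}

    def wait_for_task(poll_task_state, task_id, interval=0.5, timeout=300.0):
        """Poll task_id until it reaches a terminal state or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_task_state(task_id)   # e.g. ("running", 77)
            if state in TERMINAL_STATES:
                if state == "error":
                    raise RuntimeError(f"Task {task_id} failed")
                return state
            # Corresponds to the periodic "progress is N%" lines logged while polling.
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")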
[ 787.563340] env[62914]: DEBUG nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 787.564326] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e9d48b-2555-41e8-8fa6-7538792b306c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.651906] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.845291] env[62914]: DEBUG nova.scheduler.client.report [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 787.972019] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a7dc68-51fc-d441-49e2-5592ce2a228d, 'name': SearchDatastore_Task, 'duration_secs': 0.011936} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.972320] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.972583] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. 
{{(pid=62914) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 787.972859] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eff353f1-dc55-4c42-9749-9b2df8641500 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.982453] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 787.982453] env[62914]: value = "task-4831844" [ 787.982453] env[62914]: _type = "Task" [ 787.982453] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.993392] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.091154] env[62914]: INFO nova.compute.manager [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Took 40.85 seconds to build instance. [ 788.354150] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.838s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.354422] env[62914]: INFO nova.compute.manager [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Migrating [ 788.368742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.691s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.368742] env[62914]: DEBUG nova.objects.instance [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lazy-loading 'resources' on Instance uuid cead3557-080d-4956-a957-cac449bb69f6 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.496823] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831844, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.596149] env[62914]: DEBUG oslo_concurrency.lockutils [None req-81d3f29b-cd32-49e5-a578-0a16de65f975 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.344s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.883669] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.883669] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.883669] env[62914]: DEBUG nova.network.neutron [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 788.995598] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.880598} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.998901] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. 
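Note: the "Inventory has not changed for provider f2f7a014-..." entries a few lines above report the resource-provider inventory nova-compute pushes to Placement. The usable capacity Placement derives from each record is commonly described as (total - reserved) * allocation_ratio; the short sketch below recomputes that from the figures in the log. It is illustrative only, not Placement's code, and max_unit (16 VCPU, 65530 MB, 95 GB here) additionally caps what any single allocation may request.

# Illustrative only: recompute the capacity Placement would derive from the
# inventory dict logged above, using (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
}


def capacity(record):
    # Capacity available to allocations for one resource class.
    return int((record['total'] - record['reserved'])
               * record['allocation_ratio'])


for resource_class, record in inventory.items():
    print(resource_class, capacity(record))
# With the values above: VCPU 192, MEMORY_MB 196078, DISK_GB 200.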
[ 789.002377] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad8c77b-ecce-4810-9283-469b44b24929 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.033204] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.036402] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e9b4e73-21eb-4284-a135-cdc67ef97fce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.057955] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 789.057955] env[62914]: value = "task-4831845" [ 789.057955] env[62914]: _type = "Task" [ 789.057955] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.068965] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831845, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.103099] env[62914]: DEBUG nova.compute.manager [None req-b965ce9c-53eb-4802-a5a7-f29c2c1b74ec tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 06c7a7df-05b0-4ed3-a574-65991f1d3aec] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 789.170825] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b7ea48-b189-4a05-a092-e20114c3e70a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.196048] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 789.574680] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831845, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.604024] env[62914]: DEBUG nova.compute.manager [None req-b965ce9c-53eb-4802-a5a7-f29c2c1b74ec tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 06c7a7df-05b0-4ed3-a574-65991f1d3aec] Instance disappeared before build. {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2440}} [ 789.645878] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2431a8b3-09b3-4914-935f-9c44680be402 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.662639] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b2e182-6328-4fba-b000-2801ea2afe45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.696478] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911a97e6-89d3-43ec-ade3-fb690fba8dcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.707532] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 789.707532] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e238429e-f828-4b31-b07d-48233a4df58a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.710951] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cbb35d-3481-4b18-a4f7-da5e19ec5a26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.728751] env[62914]: DEBUG nova.compute.provider_tree [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.732970] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 789.732970] env[62914]: value = "task-4831846" [ 789.732970] env[62914]: _type = "Task" [ 789.732970] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.748022] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831846, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.864022] env[62914]: DEBUG nova.network.neutron [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.078023] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831845, 'name': ReconfigVM_Task, 'duration_secs': 0.951857} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.078023] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.078023] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153f2eb8-262a-48b0-b58a-3f9112a559b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.108168] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb4123c5-afa9-4160-88a4-17c27e46dddc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.125623] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 790.125623] env[62914]: value = "task-4831847" [ 790.125623] env[62914]: _type = "Task" [ 790.125623] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.131646] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b965ce9c-53eb-4802-a5a7-f29c2c1b74ec tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "06c7a7df-05b0-4ed3-a574-65991f1d3aec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.953s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.141977] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831847, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.234803] env[62914]: DEBUG nova.scheduler.client.report [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.249329] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831846, 'name': PowerOffVM_Task, 'duration_secs': 0.2924} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.249469] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 790.249663] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 790.366221] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.636485] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831847, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.643723] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 790.743288] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.378s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.745918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.631s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.746212] env[62914]: DEBUG nova.objects.instance [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 790.758516] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 790.758846] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 790.759053] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.759287] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 790.759457] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.759648] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 
tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 790.760690] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 790.760690] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 790.760690] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 790.760690] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 790.760857] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 790.769531] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b819266d-a337-4e0b-9e1b-94bfd5be73c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.782083] env[62914]: INFO nova.scheduler.client.report [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Deleted allocations for instance cead3557-080d-4956-a957-cac449bb69f6 [ 790.794575] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 790.794575] env[62914]: value = "task-4831848" [ 790.794575] env[62914]: _type = "Task" [ 790.794575] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.802325] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831848, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.138471] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831847, 'name': ReconfigVM_Task, 'duration_secs': 0.800213} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.139303] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 791.139303] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf44f6dd-69a7-45df-a0d5-01e39556e5be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.146176] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 791.146176] env[62914]: value = "task-4831849" [ 791.146176] env[62914]: _type = "Task" [ 791.146176] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.162529] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.176663] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.293724] env[62914]: DEBUG oslo_concurrency.lockutils [None req-258f5173-abfc-4249-bcce-189e364d8b5a tempest-ServersAdmin275Test-151175646 tempest-ServersAdmin275Test-151175646-project-member] Lock "cead3557-080d-4956-a957-cac449bb69f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.421s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.306158] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831848, 'name': ReconfigVM_Task, 'duration_secs': 0.211177} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.306158] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 791.657758] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831849, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.759356] env[62914]: DEBUG oslo_concurrency.lockutils [None req-32b2057a-8611-4bb5-8cec-1b035dcb5857 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.760173] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.582s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.762757] env[62914]: INFO nova.compute.claims [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:24:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd1b046e-6be2-4ac8-bbb2-0adf61fb18f6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1771667993',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 
{{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.815212] env[62914]: DEBUG nova.virt.hardware [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.822746] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 791.823102] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-803e08f4-833d-46a5-8da5-2fc8e645defd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.849132] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 
tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 791.849132] env[62914]: value = "task-4831850" [ 791.849132] env[62914]: _type = "Task" [ 791.849132] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.862028] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831850, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.888033] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2617006d-4d88-4c01-a248-bc0f94f4bff9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.913409] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 792.158937] env[62914]: DEBUG oslo_vmware.api [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831849, 'name': PowerOnVM_Task, 'duration_secs': 0.639783} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.158937] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 792.161415] env[62914]: DEBUG nova.compute.manager [None req-7b08382b-9f9a-4e89-aa95-c1aaaab44ddd tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 792.162023] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69a74fb-d2a9-40c6-8cf1-c3655aaa9722 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.362996] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831850, 'name': ReconfigVM_Task, 'duration_secs': 0.176242} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.364404] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 792.364593] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61650b4-31a9-42d0-b475-da993942ed85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.403933] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.407645] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e1c5698-c317-4f32-be8d-588e8799abe9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.421885] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 792.422708] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3e0327e-7d29-40d8-b882-9530f68de286 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.426324] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 792.426324] env[62914]: value = "task-4831851" [ 792.426324] env[62914]: _type = "Task" [ 792.426324] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.440267] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831851, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.440673] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 792.440673] env[62914]: value = "task-4831852" [ 792.440673] env[62914]: _type = "Task" [ 792.440673] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.450817] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 792.451173] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 792.938636] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831851, 'name': ReconfigVM_Task, 'duration_secs': 0.258971} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.941643] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 792.942049] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 792.959602] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.959851] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.960108] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.960309] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.960461] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.960610] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.960824] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.961846] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.962148] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.962505] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.962761] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.973070] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-624c85ca-cdbf-4102-93eb-682466296205 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.990867] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 792.990867] env[62914]: value = "task-4831853" [ 792.990867] env[62914]: _type = "Task" [ 
792.990867] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.006433] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831853, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.409480] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a785c86-5e33-4dec-ab71-3b64ec444e4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.419498] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb46c911-09c2-4dad-aa13-57744123c0e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.455048] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa916901-2185-41da-9cb3-55c185f36c21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.458235] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a10cc6-a622-423c-9964-a5d48d31003f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.483239] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfa8ad1-347a-4a52-8cd0-5dfa771169c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.488014] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de262f70-13c2-4bf4-89f9-c3a90d3fb214 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.516993] env[62914]: DEBUG nova.compute.provider_tree [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.518443] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 793.529286] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831853, 'name': ReconfigVM_Task, 'duration_secs': 0.146972} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.529434] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 794.024054] env[62914]: DEBUG nova.scheduler.client.report [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.036733] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.036733] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.036733] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.036733] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.037188] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.037188] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.037286] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.037458] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 794.037640] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.037826] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.038130] env[62914]: DEBUG nova.virt.hardware [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.043803] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Reconfiguring VM instance instance-00000013 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 794.044291] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65fb765f-67bf-4377-a8b1-effdedd26d62 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.069113] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 794.069113] env[62914]: value = "task-4831854" [ 794.069113] env[62914]: _type = "Task" [ 794.069113] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.080972] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831854, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.118392] env[62914]: DEBUG nova.network.neutron [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Port c148a862-a6a8-4c52-b1df-8e764ee00e94 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 794.134803] env[62914]: DEBUG nova.compute.manager [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 794.135033] env[62914]: DEBUG nova.compute.manager [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing instance network info cache due to event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 794.135285] env[62914]: DEBUG oslo_concurrency.lockutils [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.136806] env[62914]: DEBUG oslo_concurrency.lockutils [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.136806] env[62914]: DEBUG nova.network.neutron [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 794.536381] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.776s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.537288] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 794.542762] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.847s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.542762] env[62914]: DEBUG nova.objects.instance [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 794.587350] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831854, 'name': ReconfigVM_Task, 'duration_secs': 0.176018} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.587926] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Reconfigured VM instance instance-00000013 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 794.589624] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29b707e-8f81-40ed-8627-f1f33c6b8a54 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.652959] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f/2f7bc586-af68-4d9d-81e2-8247371dfa7f.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.657620] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36dd0317-88b3-4dc1-82de-59a3fc157304 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.699630] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 794.699630] env[62914]: value = "task-4831855" [ 794.699630] env[62914]: _type = "Task" [ 794.699630] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.716127] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831855, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.058063] env[62914]: DEBUG nova.compute.utils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 795.059244] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 795.059493] env[62914]: DEBUG nova.network.neutron [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 795.148775] env[62914]: DEBUG nova.policy [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '60ed60579cd74494959b0a7f306f2832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '319610053c8a4ca19dcb0c0b3e6b6596', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 795.161855] env[62914]: DEBUG nova.network.neutron [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updated VIF entry in instance network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 795.162237] env[62914]: DEBUG nova.network.neutron [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.171378] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.171577] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.171754] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.211403] env[62914]: DEBUG oslo_vmware.api [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831855, 'name': ReconfigVM_Task, 'duration_secs': 0.304556} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.212414] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f/2f7bc586-af68-4d9d-81e2-8247371dfa7f.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.212951] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 795.561081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-265cdff8-199e-402a-93f7-7640324b9f6e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.563112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.421s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.565101] env[62914]: INFO nova.compute.claims [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.571484] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 795.674117] env[62914]: DEBUG oslo_concurrency.lockutils [req-1f705d10-7b50-401c-9505-6a93d4d4b744 req-661e5d64-ecb9-44eb-b2c2-dffbe6f649d4 service nova] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.724946] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace1977c-5d28-4a19-a9c5-7fb1a805ac94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.750641] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82ec041-a421-405b-82d8-0d407dcd464a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.771858] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 795.776244] env[62914]: DEBUG nova.network.neutron [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Successfully created port: 141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.268412] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.268592] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.268772] env[62914]: DEBUG nova.network.neutron [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 796.415363] env[62914]: DEBUG nova.network.neutron [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Port 1c22c510-e137-4ee3-8038-3b784a81e04f binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 796.592405] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] 
Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 796.632185] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 796.632555] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 796.632636] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.632851] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 796.632939] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.633505] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 796.633784] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 796.633963] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 796.634160] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 796.634334] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 796.634515] env[62914]: DEBUG nova.virt.hardware [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 796.639152] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b920cedf-3d78-404b-8ab3-1c1704dabd48 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.656280] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71875060-e652-4564-8737-97ce42123579 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.877772] env[62914]: DEBUG nova.compute.manager [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 796.877772] env[62914]: DEBUG nova.compute.manager [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing instance network info cache due to event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 796.878380] env[62914]: DEBUG oslo_concurrency.lockutils [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.879367] env[62914]: DEBUG oslo_concurrency.lockutils [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.879367] env[62914]: DEBUG nova.network.neutron [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 797.283029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839c55f4-9e5e-4800-be4b-19259692d133 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.296278] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64572ed1-ef29-475d-aa8f-da42c9a737bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.330951] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb5d06b-9faf-41e4-b490-a3a92b0a0a64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.339667] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df81596b-446d-46f2-90a0-885ac6e02127 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.355809] env[62914]: DEBUG nova.compute.provider_tree [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.445860] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.446125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.446568] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.527105] env[62914]: DEBUG nova.network.neutron [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.804208] env[62914]: DEBUG nova.network.neutron [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updated VIF entry in instance network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 797.808026] env[62914]: DEBUG nova.network.neutron [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.859866] env[62914]: DEBUG nova.scheduler.client.report [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 797.897286] env[62914]: DEBUG nova.network.neutron [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Successfully updated port: 141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 798.030095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.307467] env[62914]: DEBUG oslo_concurrency.lockutils [req-20bdcc0c-391c-460e-b8da-24afa4c502e9 req-8c78411b-7cff-41a3-aba5-e6e2f4a8c841 service nova] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.365599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 
tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.803s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.366452] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 798.370833] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.842s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.370833] env[62914]: DEBUG nova.objects.instance [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lazy-loading 'resources' on Instance uuid fed831e0-4518-4025-89b1-7f6b644e013d {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.400240] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "refresh_cache-baf28ebf-3ab8-465c-a13b-705ccf3510dc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.400240] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "refresh_cache-baf28ebf-3ab8-465c-a13b-705ccf3510dc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.400240] env[62914]: DEBUG nova.network.neutron [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 798.522709] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.522941] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.523168] env[62914]: DEBUG nova.network.neutron [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 
2f7bc586-af68-4d9d-81e2-8247371dfa7f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 798.560538] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb5df9b-e9d0-4592-b110-43140b668a47 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.583042] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c527ee-c91b-439d-a814-d90e42b721b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.591834] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 798.719117] env[62914]: DEBUG nova.compute.manager [req-0b1e7158-76b7-4972-a3be-a008f81dbf6e req-8981c1d9-1d6b-480f-aa21-be399d49a538 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Received event network-vif-plugged-141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 798.719365] env[62914]: DEBUG oslo_concurrency.lockutils [req-0b1e7158-76b7-4972-a3be-a008f81dbf6e req-8981c1d9-1d6b-480f-aa21-be399d49a538 service nova] Acquiring lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.719573] env[62914]: DEBUG oslo_concurrency.lockutils [req-0b1e7158-76b7-4972-a3be-a008f81dbf6e req-8981c1d9-1d6b-480f-aa21-be399d49a538 service nova] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.719745] env[62914]: DEBUG oslo_concurrency.lockutils [req-0b1e7158-76b7-4972-a3be-a008f81dbf6e req-8981c1d9-1d6b-480f-aa21-be399d49a538 service nova] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.720176] env[62914]: DEBUG nova.compute.manager [req-0b1e7158-76b7-4972-a3be-a008f81dbf6e req-8981c1d9-1d6b-480f-aa21-be399d49a538 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] No waiting events found dispatching network-vif-plugged-141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 798.720176] env[62914]: WARNING nova.compute.manager [req-0b1e7158-76b7-4972-a3be-a008f81dbf6e req-8981c1d9-1d6b-480f-aa21-be399d49a538 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Received unexpected event network-vif-plugged-141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 for instance with vm_state building and task_state spawning. 
[ 798.875569] env[62914]: DEBUG nova.compute.utils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.879776] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 798.881747] env[62914]: DEBUG nova.network.neutron [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 798.956273] env[62914]: DEBUG nova.network.neutron [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 798.959231] env[62914]: DEBUG nova.policy [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5da18e2dc49746d8a7125efdc106d62b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd271710592bf47b79e16552221fe7107', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 799.102364] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 799.106207] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90873447-3f93-41ca-8c3b-6db3db081569 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.114560] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 799.114560] env[62914]: value = "task-4831856" [ 799.114560] env[62914]: _type = "Task" [ 799.114560] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.129770] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.264978] env[62914]: DEBUG nova.network.neutron [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Updating instance_info_cache with network_info: [{"id": "141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5", "address": "fa:16:3e:84:50:83", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141c4f39-6b", "ovs_interfaceid": "141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.383045] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 799.412316] env[62914]: DEBUG nova.network.neutron [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Successfully created port: 7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.549140] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4581396-3879-4a3f-ba73-2ab029aea984 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.561073] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7812cdfb-a8eb-4c44-9f40-8d11033b905b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.617457] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c9822d-9236-4d54-9da5-f1c2800d81fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.633029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8dbaa2a-b296-463c-9d91-2200ff3a7695 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.637067] env[62914]: DEBUG oslo_vmware.api [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831856, 'name': PowerOnVM_Task, 'duration_secs': 0.4421} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.637387] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 799.637574] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8dec8b09-1f8b-41ca-8141-6921899b9cf1 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance '4cea2bd1-a238-4fb6-bc47-719894461228' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 799.654359] env[62914]: DEBUG nova.compute.provider_tree [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.658660] env[62914]: DEBUG nova.network.neutron [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.766522] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "refresh_cache-baf28ebf-3ab8-465c-a13b-705ccf3510dc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.766868] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Instance network_info: 
|[{"id": "141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5", "address": "fa:16:3e:84:50:83", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141c4f39-6b", "ovs_interfaceid": "141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 799.767325] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:50:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8abee039-d93e-48a7-8911-6416a3e1ff30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.775903] env[62914]: DEBUG oslo.service.loopingcall [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.776297] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 799.776857] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c560a70-f890-4a14-aac9-13f750328d1c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.798339] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.798339] env[62914]: value = "task-4831857" [ 799.798339] env[62914]: _type = "Task" [ 799.798339] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.808618] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831857, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.020089] env[62914]: DEBUG nova.compute.manager [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 800.020385] env[62914]: DEBUG nova.compute.manager [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing instance network info cache due to event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 800.021040] env[62914]: DEBUG oslo_concurrency.lockutils [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.021040] env[62914]: DEBUG oslo_concurrency.lockutils [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.022161] env[62914]: DEBUG nova.network.neutron [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 800.157936] env[62914]: DEBUG nova.scheduler.client.report [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.162335] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.314386] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831857, 'name': CreateVM_Task, 'duration_secs': 0.480209} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.314510] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 800.316375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.316375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.316375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 800.316768] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c12142a5-d24d-4df2-b91c-73723378fd84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.322691] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 800.322691] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52766cff-8187-fdea-1805-b65a733b4a91" [ 800.322691] env[62914]: _type = "Task" [ 800.322691] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.333868] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52766cff-8187-fdea-1805-b65a733b4a91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.395948] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 800.436248] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.436526] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.436714] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.436927] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.437183] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.437367] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.437598] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.437766] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.437974] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.438195] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.438380] env[62914]: DEBUG nova.virt.hardware [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.439297] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f463949-e1e8-4c7e-bba0-7ea21385f451 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.448970] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245cbf2d-9c8b-49f1-9fe4-12619ecd492b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.668237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.671613] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.962s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.673673] env[62914]: INFO nova.compute.claims [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.705990] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437c3407-6275-4ce2-a29d-297d6c7a35c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.736160] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d147c00-e0af-4691-8d99-780081ca08f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.751535] env[62914]: INFO nova.scheduler.client.report [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Deleted allocations for instance fed831e0-4518-4025-89b1-7f6b644e013d [ 800.754814] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock 
"cca4bbf9-8864-4805-b95e-954e6b570eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.754814] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.755231] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 800.844746] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52766cff-8187-fdea-1805-b65a733b4a91, 'name': SearchDatastore_Task, 'duration_secs': 0.012446} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.845176] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.845475] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.845735] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.845886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.846105] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.846432] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31b4ab44-44c4-4b32-8dda-6b836704969a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.860866] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.861143] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 800.862344] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1e9d0dd-e8a8-4e5a-8c05-d41ded50c68c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.872671] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 800.872671] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e56874-ab37-13a9-96b1-8df26ab6320d" [ 800.872671] env[62914]: _type = "Task" [ 800.872671] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.893563] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e56874-ab37-13a9-96b1-8df26ab6320d, 'name': SearchDatastore_Task, 'duration_secs': 0.011354} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.894439] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a2afb98-e7e2-4b03-82eb-27252c052180 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.901649] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 800.901649] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ea25d8-7d2e-7f60-2496-cce5c356a4a3" [ 800.901649] env[62914]: _type = "Task" [ 800.901649] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.912683] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ea25d8-7d2e-7f60-2496-cce5c356a4a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.002092] env[62914]: DEBUG nova.network.neutron [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updated VIF entry in instance network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 801.002092] env[62914]: DEBUG nova.network.neutron [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.137757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "76dfbf82-0ed0-4621-890c-060b187b47e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.137757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.137757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "76dfbf82-0ed0-4621-890c-060b187b47e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.137757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 
tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.137757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.139354] env[62914]: INFO nova.compute.manager [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Terminating instance [ 801.141573] env[62914]: DEBUG nova.compute.manager [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 801.141774] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 801.143023] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1c948a-67e6-4c7e-909d-9f8f27464ab8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.151616] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 801.151744] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1b812f8-8d5d-49ea-9d9d-d4862d8f85f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.169732] env[62914]: DEBUG oslo_vmware.api [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 801.169732] env[62914]: value = "task-4831858" [ 801.169732] env[62914]: _type = "Task" [ 801.169732] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.192946] env[62914]: DEBUG oslo_vmware.api [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.267280] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b72ca59d-6b5d-468c-8288-1bf9003c7fc7 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance '2f7bc586-af68-4d9d-81e2-8247371dfa7f' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 801.273019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bdafc2e1-8386-4718-8715-bb9eb484c2ff tempest-ServersTestManualDisk-1910117612 tempest-ServersTestManualDisk-1910117612-project-member] Lock "fed831e0-4518-4025-89b1-7f6b644e013d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.040s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.362560] env[62914]: DEBUG nova.network.neutron [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Successfully updated port: 7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.417137] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ea25d8-7d2e-7f60-2496-cce5c356a4a3, 'name': SearchDatastore_Task, 'duration_secs': 0.011411} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.417592] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.417783] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] baf28ebf-3ab8-465c-a13b-705ccf3510dc/baf28ebf-3ab8-465c-a13b-705ccf3510dc.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 801.419036] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b619dc6-be55-4bbc-80ae-c4e8719aad1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.427650] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 801.427650] env[62914]: value = "task-4831859" [ 801.427650] env[62914]: _type = "Task" [ 801.427650] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.445203] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.505084] env[62914]: DEBUG oslo_concurrency.lockutils [req-aeedb22f-ecb4-4185-a98d-a87f29a10d74 req-ba215460-a7b1-4f44-81be-616a355811bc service nova] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.542292] env[62914]: DEBUG nova.compute.manager [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Received event network-changed-141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 801.542292] env[62914]: DEBUG nova.compute.manager [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Refreshing instance network info cache due to event network-changed-141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 801.542425] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] Acquiring lock "refresh_cache-baf28ebf-3ab8-465c-a13b-705ccf3510dc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.542571] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] Acquired lock "refresh_cache-baf28ebf-3ab8-465c-a13b-705ccf3510dc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.542743] env[62914]: DEBUG nova.network.neutron [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Refreshing network info cache for port 141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 801.683668] env[62914]: DEBUG oslo_vmware.api [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831858, 'name': PowerOffVM_Task, 'duration_secs': 0.413436} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.684113] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 801.684302] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 801.685159] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d12f142-5a36-403d-92a0-997d5dad5e72 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.764694] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 801.764694] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 801.764694] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Deleting the datastore file [datastore1] 76dfbf82-0ed0-4621-890c-060b187b47e0 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.767799] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9623debb-5088-4767-a490-52cb2bf30fa4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.784327] env[62914]: DEBUG oslo_vmware.api [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for the task: (returnval){ [ 801.784327] env[62914]: value = "task-4831861" [ 801.784327] env[62914]: _type = "Task" [ 801.784327] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.799517] env[62914]: DEBUG oslo_vmware.api [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831861, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.864709] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "refresh_cache-b77a3d27-fe9f-49fc-95d1-15fe82762833" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.864938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "refresh_cache-b77a3d27-fe9f-49fc-95d1-15fe82762833" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.865347] env[62914]: DEBUG nova.network.neutron [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.944769] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831859, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.306650] env[62914]: DEBUG oslo_vmware.api [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Task: {'id': task-4831861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380055} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.306650] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.306650] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 802.306650] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 802.306650] env[62914]: INFO nova.compute.manager [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 802.307240] env[62914]: DEBUG oslo.service.loopingcall [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.307240] env[62914]: DEBUG nova.compute.manager [-] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 802.307240] env[62914]: DEBUG nova.network.neutron [-] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 802.374351] env[62914]: DEBUG nova.network.neutron [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Updated VIF entry in instance network info cache for port 141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 802.374909] env[62914]: DEBUG nova.network.neutron [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Updating instance_info_cache with network_info: [{"id": "141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5", "address": "fa:16:3e:84:50:83", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141c4f39-6b", "ovs_interfaceid": "141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.400966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aea5f9-5ae4-406b-b506-0c93545bef55 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.413013] env[62914]: DEBUG nova.network.neutron [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.415655] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e477a6c7-04a9-498b-bfe1-de4b3b7dc729 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.458903] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e27be8-d748-4284-8689-8f0f103a0568 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.465615] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537391} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.468108] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] baf28ebf-3ab8-465c-a13b-705ccf3510dc/baf28ebf-3ab8-465c-a13b-705ccf3510dc.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 802.468468] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.468810] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38b161c4-e6b7-4391-a3d3-2bf73dd08658 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.472626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c21fc8-d3e7-4fc1-ad42-6fe6173ac963 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.488429] env[62914]: DEBUG nova.compute.provider_tree [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.493497] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 802.493497] env[62914]: value = "task-4831862" [ 802.493497] env[62914]: _type = "Task" [ 802.493497] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.503248] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831862, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.671507] env[62914]: DEBUG nova.network.neutron [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Updating instance_info_cache with network_info: [{"id": "7df887d7-caf7-4a91-b3f6-2476e768b7c2", "address": "fa:16:3e:e8:7d:75", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df887d7-ca", "ovs_interfaceid": "7df887d7-caf7-4a91-b3f6-2476e768b7c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.776201] env[62914]: DEBUG nova.compute.manager [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 802.776680] env[62914]: DEBUG nova.compute.manager [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing instance network info cache due to event network-changed-c2f62cb3-f405-432b-9d8c-8c08ea54e240. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 802.776680] env[62914]: DEBUG oslo_concurrency.lockutils [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] Acquiring lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.776680] env[62914]: DEBUG oslo_concurrency.lockutils [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] Acquired lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.776922] env[62914]: DEBUG nova.network.neutron [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Refreshing network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 802.881625] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1f1f520-25b1-4d7b-ab1e-8c398ecda9a6 req-a15d0203-ab57-469c-888e-6e58a506b776 service nova] Releasing lock "refresh_cache-baf28ebf-3ab8-465c-a13b-705ccf3510dc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.995379] env[62914]: DEBUG nova.scheduler.client.report [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 803.010251] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074694} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.011493] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.011493] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab72f266-4e52-4fcb-b57a-ca319f598399 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.035889] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] baf28ebf-3ab8-465c-a13b-705ccf3510dc/baf28ebf-3ab8-465c-a13b-705ccf3510dc.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.036972] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faa41cd2-66df-4158-b5ce-f55ba12c6db5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.059558] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 803.059558] env[62914]: value = "task-4831863" [ 803.059558] env[62914]: _type = "Task" [ 803.059558] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.069998] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831863, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.174566] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "refresh_cache-b77a3d27-fe9f-49fc-95d1-15fe82762833" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.174837] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Instance network_info: |[{"id": "7df887d7-caf7-4a91-b3f6-2476e768b7c2", "address": "fa:16:3e:e8:7d:75", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df887d7-ca", "ovs_interfaceid": "7df887d7-caf7-4a91-b3f6-2476e768b7c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 803.175301] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:7d:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7df887d7-caf7-4a91-b3f6-2476e768b7c2', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.183715] env[62914]: DEBUG oslo.service.loopingcall [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.184116] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 803.184205] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34f00c92-4203-4f85-88f3-506fc438f41d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.207400] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.207400] env[62914]: value = "task-4831864" [ 803.207400] env[62914]: _type = "Task" [ 803.207400] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.218275] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831864, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.460685] env[62914]: DEBUG nova.network.neutron [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Port c148a862-a6a8-4c52-b1df-8e764ee00e94 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 803.461383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.461964] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.462509] env[62914]: DEBUG nova.network.neutron [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.506486] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.833s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.506486] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 803.507867] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 39.654s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.508069] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.508270] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 803.508549] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.804s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.508770] env[62914]: DEBUG nova.objects.instance [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lazy-loading 'resources' on Instance uuid 5a704020-921e-4ede-9fd9-b745c027a158 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.510940] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6e1080-917d-4df9-9283-237fb544b505 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.532789] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23cbdf6-ec62-445c-88fe-4f360ea62488 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.568279] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f64297-77e0-4e3f-b9f2-83208e433e5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.580962] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831863, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.582490] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63225668-d784-41b3-932e-f930a615d23f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.628843] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179360MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 803.629162] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.720191] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831864, 'name': CreateVM_Task, 'duration_secs': 0.386553} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.722998] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 803.722998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.722998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.722998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 803.722998] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c2b3186-f70b-4896-8cb4-3068aebc2093 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.728395] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 803.728395] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bd8914-8c54-9c8c-b0be-842fa7169320" [ 803.728395] env[62914]: _type = "Task" [ 803.728395] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.743621] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bd8914-8c54-9c8c-b0be-842fa7169320, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.784568] env[62914]: DEBUG nova.network.neutron [-] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.794576] env[62914]: DEBUG nova.network.neutron [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updated VIF entry in instance network info cache for port c2f62cb3-f405-432b-9d8c-8c08ea54e240. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.794576] env[62914]: DEBUG nova.network.neutron [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [{"id": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "address": "fa:16:3e:f7:4b:8f", "network": {"id": "ca1b7231-553e-43ee-81c9-c96d74fd74dc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084159956-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2cb4ab64c41d4df2b72b26c54a0bdccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f62cb3-f4", "ovs_interfaceid": "c2f62cb3-f405-432b-9d8c-8c08ea54e240", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.014780] env[62914]: DEBUG nova.compute.utils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 804.021561] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 804.022090] env[62914]: DEBUG nova.network.neutron [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 804.084016] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831863, 'name': ReconfigVM_Task, 'duration_secs': 0.570588} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.086335] env[62914]: DEBUG nova.policy [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c105c6c6b2ee490e8b9b949e160d1105', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94b26b3770474542a09883a765265a5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 804.088021] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Reconfigured VM instance instance-00000035 to attach disk [datastore2] baf28ebf-3ab8-465c-a13b-705ccf3510dc/baf28ebf-3ab8-465c-a13b-705ccf3510dc.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.089095] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cac81178-8b7b-4fa2-a418-7d698dbca736 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.099422] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 804.099422] env[62914]: value = "task-4831865" [ 804.099422] env[62914]: _type = "Task" [ 804.099422] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.118607] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831865, 'name': Rename_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.240039] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bd8914-8c54-9c8c-b0be-842fa7169320, 'name': SearchDatastore_Task, 'duration_secs': 0.011513} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.243187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.243455] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.243695] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.243843] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.244264] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.244817] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c74e795f-f9e1-4aa3-bb1d-459236e17f52 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.255551] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.255735] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 804.259471] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b05b9a2-946c-42ba-a022-a3938d260654 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.267171] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 804.267171] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5249e399-4325-3422-3944-cd1cb12834e2" [ 804.267171] env[62914]: _type = "Task" [ 804.267171] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.277227] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5249e399-4325-3422-3944-cd1cb12834e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.288579] env[62914]: INFO nova.compute.manager [-] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Took 1.98 seconds to deallocate network for instance. [ 804.296921] env[62914]: DEBUG oslo_concurrency.lockutils [req-e37e287c-10e3-4bde-8607-82c93517c53f req-fd9a1362-60b6-4f37-9565-f1479b9b8de3 service nova] Releasing lock "refresh_cache-1d74504f-b641-42c6-a420-c80614d69b23" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.323539] env[62914]: DEBUG nova.compute.manager [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Received event network-vif-plugged-7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 804.323739] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] Acquiring lock "b77a3d27-fe9f-49fc-95d1-15fe82762833-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.323987] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.324245] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.324440] env[62914]: DEBUG nova.compute.manager [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: 
b77a3d27-fe9f-49fc-95d1-15fe82762833] No waiting events found dispatching network-vif-plugged-7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 804.326460] env[62914]: WARNING nova.compute.manager [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Received unexpected event network-vif-plugged-7df887d7-caf7-4a91-b3f6-2476e768b7c2 for instance with vm_state building and task_state spawning. [ 804.326460] env[62914]: DEBUG nova.compute.manager [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Received event network-changed-7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 804.326460] env[62914]: DEBUG nova.compute.manager [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Refreshing instance network info cache due to event network-changed-7df887d7-caf7-4a91-b3f6-2476e768b7c2. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 804.329450] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] Acquiring lock "refresh_cache-b77a3d27-fe9f-49fc-95d1-15fe82762833" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.329580] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] Acquired lock "refresh_cache-b77a3d27-fe9f-49fc-95d1-15fe82762833" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.329947] env[62914]: DEBUG nova.network.neutron [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Refreshing network info cache for port 7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.413867] env[62914]: DEBUG nova.network.neutron [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": 
"c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.530814] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 804.557609] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.557971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.558761] env[62914]: DEBUG nova.compute.manager [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Going to confirm migration 3 {{(pid=62914) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 804.625106] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831865, 'name': Rename_Task, 'duration_secs': 0.32266} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.625460] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 804.625673] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36cffb0f-6eff-4f92-b20c-52ed8e1526b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.634929] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 804.634929] env[62914]: value = "task-4831866" [ 804.634929] env[62914]: _type = "Task" [ 804.634929] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.647480] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831866, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.722155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.722441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.784246] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5249e399-4325-3422-3944-cd1cb12834e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010894} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.790900] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31cc050c-4322-4821-8c14-63ef92f41e63 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.797536] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.801739] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 804.801739] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d6a250-cce4-a95d-65f6-3915965856e0" [ 804.801739] env[62914]: _type = "Task" [ 804.801739] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.812562] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d6a250-cce4-a95d-65f6-3915965856e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.823389] env[62914]: DEBUG nova.network.neutron [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Successfully created port: 3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.855380] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d1c8ae-289b-4ea5-9be5-772e53b71855 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.864586] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116533a3-f20b-448d-9709-084c79601182 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.899407] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd9f7dd-f96f-4bdd-a71e-5f5397fbb06e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.906546] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ec7e00-70c9-4674-9508-7d463a22ad45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.923584] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.925032] env[62914]: DEBUG nova.compute.provider_tree [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.949841] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "1d74504f-b641-42c6-a420-c80614d69b23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.950237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "1d74504f-b641-42c6-a420-c80614d69b23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.950575] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock 
"1d74504f-b641-42c6-a420-c80614d69b23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.950654] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "1d74504f-b641-42c6-a420-c80614d69b23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.950833] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "1d74504f-b641-42c6-a420-c80614d69b23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.954456] env[62914]: INFO nova.compute.manager [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Terminating instance [ 804.955710] env[62914]: DEBUG nova.compute.manager [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 804.955890] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 804.956772] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfb3262-c7ba-440f-838d-8e135c8a90ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.966235] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 804.966389] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1b0fb2e-c6ed-496c-9ff8-18193512c635 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.974325] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 804.974325] env[62914]: value = "task-4831867" [ 804.974325] env[62914]: _type = "Task" [ 804.974325] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.984834] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.146300] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831866, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.168095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.168401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.168526] env[62914]: DEBUG nova.network.neutron [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.168801] env[62914]: DEBUG nova.objects.instance [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'info_cache' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.314208] env[62914]: DEBUG nova.compute.manager [req-b6ff02d7-fcf5-4dea-954f-4db3f38d8728 req-074ef4cd-217d-4a48-99cd-6fe38a41f21f service nova] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Received event network-vif-deleted-29704154-556c-4ee1-a5d2-fafcd0ac6017 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 805.324959] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d6a250-cce4-a95d-65f6-3915965856e0, 'name': SearchDatastore_Task, 'duration_secs': 0.010525} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.324959] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.325934] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] b77a3d27-fe9f-49fc-95d1-15fe82762833/b77a3d27-fe9f-49fc-95d1-15fe82762833.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 805.325934] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-982c9b4b-0eb2-48d1-bc2e-4538880736f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.336409] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 805.336409] env[62914]: value = "task-4831868" [ 805.336409] env[62914]: _type = "Task" [ 805.336409] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.346977] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.361172] env[62914]: DEBUG nova.network.neutron [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Updated VIF entry in instance network info cache for port 7df887d7-caf7-4a91-b3f6-2476e768b7c2. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 805.361172] env[62914]: DEBUG nova.network.neutron [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Updating instance_info_cache with network_info: [{"id": "7df887d7-caf7-4a91-b3f6-2476e768b7c2", "address": "fa:16:3e:e8:7d:75", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df887d7-ca", "ovs_interfaceid": "7df887d7-caf7-4a91-b3f6-2476e768b7c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.427950] env[62914]: DEBUG nova.compute.manager [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62914) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 805.428247] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.429170] env[62914]: DEBUG nova.scheduler.client.report [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.488027] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831867, 'name': PowerOffVM_Task, 'duration_secs': 0.222194} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.488027] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 805.488027] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 805.488027] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6871037e-d543-4bd4-810b-6cd43280d10e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.548873] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 805.557903] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 805.558154] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 805.558344] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Deleting the datastore file [datastore2] 1d74504f-b641-42c6-a420-c80614d69b23 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 805.558908] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b6ef117-0561-44fe-91a9-2239fa6ec8cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.567438] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for the task: (returnval){ [ 805.567438] env[62914]: value = "task-4831870" [ 805.567438] env[62914]: _type = "Task" [ 805.567438] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.578735] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.587514] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.587783] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.587939] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.588379] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.588379] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.588535] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.588773] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 805.588980] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.589151] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.589305] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.589480] env[62914]: DEBUG nova.virt.hardware [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.591384] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0597457-c784-401f-9323-30a7a737bf64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.600775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e85a818-44fc-4365-9422-4ed7c5bf405f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.654271] env[62914]: DEBUG oslo_vmware.api [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831866, 'name': PowerOnVM_Task, 'duration_secs': 0.716176} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.654339] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 805.654599] env[62914]: INFO nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Took 9.06 seconds to spawn the instance on the hypervisor. 
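The entries above trace the full spawn path for instance baf28ebf-3ab8-465c-a13b-705ccf3510dc: the root VMDK is attached via ReconfigVM_Task, the VM is renamed, and PowerOnVM_Task is polled until it reports "completed successfully" with a duration_secs value, after which the manager logs the 9.06-second spawn. A minimal, self-contained sketch (illustrative only, not Nova or oslo.vmware code; the regex and sample lines below are assumptions modeled on the records above) that pulls those per-task durations out of log text of this shape:

```python
import re

# Illustrative excerpt in the same shape as the completed-task records above.
LOG_EXCERPT = """
Task: {'id': task-4831863, 'name': ReconfigVM_Task, 'duration_secs': 0.570588} completed successfully.
Task: {'id': task-4831865, 'name': Rename_Task, 'duration_secs': 0.32266} completed successfully.
Task: {'id': task-4831866, 'name': PowerOnVM_Task, 'duration_secs': 0.716176} completed successfully.
"""

# Matches the "completed successfully" records emitted when a vCenter task finishes polling.
TASK_RE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[0-9.]+)\} completed successfully"
)

def task_durations(text):
    """Return {task_name: duration_in_seconds} for completed tasks found in *text*."""
    return {m.group("name"): float(m.group("secs")) for m in TASK_RE.finditer(text)}

if __name__ == "__main__":
    durations = task_durations(LOG_EXCERPT)
    for name, secs in durations.items():
        print(f"{name}: {secs:.3f}s")
    print(f"total: {sum(durations.values()):.3f}s")
```

Run against a larger capture of the same stream, this gives a quick per-task breakdown of where the spawn time went.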
[ 805.655653] env[62914]: DEBUG nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 805.656178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e53324f-a160-48fa-9c70-ce5e7ca19274 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.849215] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831868, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.864591] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d6df5a8-792d-4fe0-9dec-adb67d3626dd req-c4e31355-6b4d-4cf4-af57-8b4346269bac service nova] Releasing lock "refresh_cache-b77a3d27-fe9f-49fc-95d1-15fe82762833" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.937234] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.428s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.939730] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.834s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.939976] env[62914]: DEBUG nova.objects.instance [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lazy-loading 'resources' on Instance uuid 8b83f82b-42f7-4f33-abc4-ff278d343309 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.985643] env[62914]: INFO nova.scheduler.client.report [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocations for instance 5a704020-921e-4ede-9fd9-b745c027a158 [ 806.080320] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.186998] env[62914]: INFO nova.compute.manager [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Took 45.03 seconds to build instance. 
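
The repeated "Task: {...} progress is N%" lines, including the DeleteDatastoreFile_Task entries above, come from a poll loop: oslo.vmware re-reads the task's info until it reports success or error, logging progress on each pass. A rough, generic sketch of that pattern, using a hypothetical get_task_info callable and a fixed sleep instead of the looping-call machinery the real code uses:

    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state (sketch only)."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        """Poll a vCenter-style task until it completes.

        get_task_info is a hypothetical callable returning an object with
        .state in {'queued', 'running', 'success', 'error'}, .progress and .error.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            # Corresponds to the per-poll "progress is N%" DEBUG lines in the log.
            print("Task %s: progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(interval)
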
[ 806.347486] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831868, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553626} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.347748] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] b77a3d27-fe9f-49fc-95d1-15fe82762833/b77a3d27-fe9f-49fc-95d1-15fe82762833.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 806.347962] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.348277] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5350b549-c1bf-4e6c-b2ff-0d9e46b3337e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.357157] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 806.357157] env[62914]: value = "task-4831871" [ 806.357157] env[62914]: _type = "Task" [ 806.357157] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.368448] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831871, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.497492] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d979676-3227-408f-bdd7-783610e58cd4 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "5a704020-921e-4ede-9fd9-b745c027a158" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.914s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.540310] env[62914]: DEBUG nova.compute.manager [req-b0c02610-31ee-45fb-afb6-7644403da570 req-b38b43ab-5dde-4b9b-970c-fb143e2c4e7a service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Received event network-vif-plugged-3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 806.541763] env[62914]: DEBUG oslo_concurrency.lockutils [req-b0c02610-31ee-45fb-afb6-7644403da570 req-b38b43ab-5dde-4b9b-970c-fb143e2c4e7a service nova] Acquiring lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.542124] env[62914]: DEBUG oslo_concurrency.lockutils [req-b0c02610-31ee-45fb-afb6-7644403da570 req-b38b43ab-5dde-4b9b-970c-fb143e2c4e7a service nova] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.542374] env[62914]: DEBUG oslo_concurrency.lockutils [req-b0c02610-31ee-45fb-afb6-7644403da570 req-b38b43ab-5dde-4b9b-970c-fb143e2c4e7a service nova] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.542683] env[62914]: DEBUG nova.compute.manager [req-b0c02610-31ee-45fb-afb6-7644403da570 req-b38b43ab-5dde-4b9b-970c-fb143e2c4e7a service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] No waiting events found dispatching network-vif-plugged-3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 806.543013] env[62914]: WARNING nova.compute.manager [req-b0c02610-31ee-45fb-afb6-7644403da570 req-b38b43ab-5dde-4b9b-970c-fb143e2c4e7a service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Received unexpected event network-vif-plugged-3d606e37-edb2-4b01-b58d-acec974dda62 for instance with vm_state building and task_state spawning. [ 806.584742] env[62914]: DEBUG oslo_vmware.api [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Task: {'id': task-4831870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.687578} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.585633] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 806.586011] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 806.586296] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 806.586504] env[62914]: INFO nova.compute.manager [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Took 1.63 seconds to destroy the instance on the hypervisor. [ 806.586884] env[62914]: DEBUG oslo.service.loopingcall [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.587216] env[62914]: DEBUG nova.compute.manager [-] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 806.587299] env[62914]: DEBUG nova.network.neutron [-] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 806.590298] env[62914]: DEBUG nova.network.neutron [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Successfully updated port: 3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.690551] env[62914]: DEBUG oslo_concurrency.lockutils [None req-59765cbb-0bec-4b54-a3f7-e89b47d4ac1f tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.058s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.870985] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072631} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.871293] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.872100] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d211c012-de6a-45f9-8387-79f2c5abdb90 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.902733] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] b77a3d27-fe9f-49fc-95d1-15fe82762833/b77a3d27-fe9f-49fc-95d1-15fe82762833.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.904140] env[62914]: DEBUG nova.network.neutron [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.905443] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51125b0e-ac4b-4ae3-89f4-57f614375dba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.924110] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.924417] env[62914]: DEBUG nova.objects.instance [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'migration_context' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.932548] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 806.932548] env[62914]: value = "task-4831872" [ 806.932548] env[62914]: _type = "Task" [ 806.932548] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.945165] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831872, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.095567] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "refresh_cache-45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.095567] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquired lock "refresh_cache-45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.095769] env[62914]: DEBUG nova.network.neutron [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 807.109144] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92776a7c-efe8-4196-9413-9c9ab6d0a0e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.120798] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8c96a9-9631-44cc-ba3b-d05e286c4d69 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.158463] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0374f138-9a3a-4017-bfb5-6da749eada38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.167695] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24da299-30f5-40e3-86ed-aac86ddfee41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.188236] env[62914]: DEBUG nova.compute.provider_tree [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.194062] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 807.426708] env[62914]: DEBUG nova.objects.base [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Object Instance<2f7bc586-af68-4d9d-81e2-8247371dfa7f> lazy-loaded attributes: info_cache,migration_context {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 807.427688] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b316d61-298e-4a3c-8d3e-a2eda7e38f68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.453262] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e590084e-9229-4f90-bf0d-950d3c0cb38c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.458949] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831872, 'name': ReconfigVM_Task, 'duration_secs': 0.300911} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.459760] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Reconfigured VM instance instance-00000036 to attach disk [datastore2] b77a3d27-fe9f-49fc-95d1-15fe82762833/b77a3d27-fe9f-49fc-95d1-15fe82762833.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.461124] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6e20f94-fff5-47c3-ad98-12ec55fadde8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.463378] env[62914]: DEBUG oslo_vmware.api [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 807.463378] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c0f6ff-9725-8330-5b38-3189948868a7" [ 807.463378] env[62914]: _type = "Task" [ 807.463378] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.469373] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 807.469373] env[62914]: value = "task-4831873" [ 807.469373] env[62914]: _type = "Task" [ 807.469373] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.475920] env[62914]: DEBUG oslo_vmware.api [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c0f6ff-9725-8330-5b38-3189948868a7, 'name': SearchDatastore_Task, 'duration_secs': 0.007644} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.476315] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.481920] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831873, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.574815] env[62914]: DEBUG nova.network.neutron [-] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.653602] env[62914]: DEBUG nova.network.neutron [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.690532] env[62914]: DEBUG nova.scheduler.client.report [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 807.720380] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.889395] env[62914]: DEBUG nova.network.neutron [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Updating instance_info_cache with network_info: [{"id": "3d606e37-edb2-4b01-b58d-acec974dda62", "address": "fa:16:3e:6d:fb:02", "network": {"id": "78bda024-f872-4132-bbc3-bf672e19df5e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1045317739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"94b26b3770474542a09883a765265a5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d606e37-ed", "ovs_interfaceid": "3d606e37-edb2-4b01-b58d-acec974dda62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.982234] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831873, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.076071] env[62914]: INFO nova.compute.manager [-] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Took 1.49 seconds to deallocate network for instance. [ 808.196907] env[62914]: DEBUG oslo_concurrency.lockutils [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.257s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.200501] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.324s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.203190] env[62914]: INFO nova.compute.claims [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.234513] env[62914]: INFO nova.scheduler.client.report [None req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Deleted allocations for instance 8b83f82b-42f7-4f33-abc4-ff278d343309 [ 808.395507] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Releasing lock "refresh_cache-45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.395507] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Instance network_info: |[{"id": "3d606e37-edb2-4b01-b58d-acec974dda62", "address": "fa:16:3e:6d:fb:02", "network": {"id": "78bda024-f872-4132-bbc3-bf672e19df5e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1045317739-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94b26b3770474542a09883a765265a5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d606e37-ed", "ovs_interfaceid": "3d606e37-edb2-4b01-b58d-acec974dda62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 808.395507] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:fb:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d606e37-edb2-4b01-b58d-acec974dda62', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.407943] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Creating folder: Project (94b26b3770474542a09883a765265a5b). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 808.407943] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ac07eb9-9119-4dcc-8d47-6f7de030c8b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.420032] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Created folder: Project (94b26b3770474542a09883a765265a5b) in parent group-v941773. [ 808.420032] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Creating folder: Instances. Parent ref: group-v941934. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 808.420032] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80ee907b-5853-4af4-86f6-ece90c845908 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.433304] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Created folder: Instances in parent group-v941934. 
[ 808.434299] env[62914]: DEBUG oslo.service.loopingcall [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.434684] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 808.435062] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d45babfc-5e56-4d42-90d0-181aa9881bde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.459363] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.459363] env[62914]: value = "task-4831876" [ 808.459363] env[62914]: _type = "Task" [ 808.459363] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.469653] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831876, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.479560] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831873, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.592373] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.642406] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.642770] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.643041] env[62914]: DEBUG nova.objects.instance [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'flavor' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.742997] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-94816abd-771f-408a-a547-0f418309acc3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "8b83f82b-42f7-4f33-abc4-ff278d343309" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.437s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.974859] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831876, 'name': CreateVM_Task, 'duration_secs': 0.49114} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.976238] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 808.978209] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.978209] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.978209] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 808.978639] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0738d92-8243-4a9b-b407-67dc715c2fc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.984787] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831873, 'name': Rename_Task, 'duration_secs': 1.154931} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.985614] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 808.986320] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a336081-31e4-4743-9cba-c8fb8d48ccce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.992609] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 808.992609] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522b8156-8bef-483e-0e48-1bcf247c4c0b" [ 808.992609] env[62914]: _type = "Task" [ 808.992609] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.997876] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 808.997876] env[62914]: value = "task-4831877" [ 808.997876] env[62914]: _type = "Task" [ 808.997876] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.001569] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522b8156-8bef-483e-0e48-1bcf247c4c0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.012491] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831877, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.147895] env[62914]: DEBUG nova.objects.instance [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'pci_requests' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.511131] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522b8156-8bef-483e-0e48-1bcf247c4c0b, 'name': SearchDatastore_Task, 'duration_secs': 0.022266} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.511537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.511725] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.511961] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.512132] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.512334] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.512939] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3b5b8f0-2dbf-4c39-9221-4fc21a8027fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.518847] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831877, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.524614] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.524866] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 809.525909] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7538266b-e428-4bef-ba58-5d54804c7658 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.533705] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 809.533705] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a23cc8-fa65-9a1c-7f10-6f6c0a17f1af" [ 809.533705] env[62914]: _type = "Task" [ 809.533705] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.543205] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a23cc8-fa65-9a1c-7f10-6f6c0a17f1af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.635542] env[62914]: DEBUG nova.compute.manager [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Received event network-changed-3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 809.635754] env[62914]: DEBUG nova.compute.manager [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Refreshing instance network info cache due to event network-changed-3d606e37-edb2-4b01-b58d-acec974dda62. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 809.635970] env[62914]: DEBUG oslo_concurrency.lockutils [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] Acquiring lock "refresh_cache-45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.636281] env[62914]: DEBUG oslo_concurrency.lockutils [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] Acquired lock "refresh_cache-45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.636368] env[62914]: DEBUG nova.network.neutron [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Refreshing network info cache for port 3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 809.650408] env[62914]: DEBUG nova.objects.base [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Object Instance<2d48056c-d38f-4be1-b28b-71da14607870> lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 809.653881] env[62914]: DEBUG nova.network.neutron [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 809.730329] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae6fe2d-2b30-4269-8c00-496b8d57a563 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.739847] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69febdf6-2234-4cb6-966f-9baffb4db95b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.781633] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9c195fc2-c5b2-413a-807d-b346be01d129 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.139s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.784640] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce30d5ad-c26c-4903-9020-73704456ab0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.795090] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e99874f-84f5-4df8-909f-9ef65a8c08ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.812021] env[62914]: DEBUG nova.compute.provider_tree [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 
tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.013246] env[62914]: DEBUG oslo_vmware.api [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831877, 'name': PowerOnVM_Task, 'duration_secs': 0.801034} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.013246] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 810.013246] env[62914]: INFO nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Took 9.62 seconds to spawn the instance on the hypervisor. [ 810.013423] env[62914]: DEBUG nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 810.014547] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cf3a86-2c09-40b9-bc13-6bd3f0842733 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.045037] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a23cc8-fa65-9a1c-7f10-6f6c0a17f1af, 'name': SearchDatastore_Task, 'duration_secs': 0.014697} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.045663] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36712824-e63a-4b25-a0c9-ecee7f539cdc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.052123] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 810.052123] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ccfc59-2e66-98b1-19ad-edd4a90c6ee5" [ 810.052123] env[62914]: _type = "Task" [ 810.052123] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.061031] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ccfc59-2e66-98b1-19ad-edd4a90c6ee5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.314036] env[62914]: DEBUG nova.scheduler.client.report [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 810.362092] env[62914]: DEBUG nova.network.neutron [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Updated VIF entry in instance network info cache for port 3d606e37-edb2-4b01-b58d-acec974dda62. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 810.362297] env[62914]: DEBUG nova.network.neutron [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Updating instance_info_cache with network_info: [{"id": "3d606e37-edb2-4b01-b58d-acec974dda62", "address": "fa:16:3e:6d:fb:02", "network": {"id": "78bda024-f872-4132-bbc3-bf672e19df5e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1045317739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94b26b3770474542a09883a765265a5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d606e37-ed", "ovs_interfaceid": "3d606e37-edb2-4b01-b58d-acec974dda62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.532219] env[62914]: INFO nova.compute.manager [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Took 48.41 seconds to build instance. [ 810.565166] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ccfc59-2e66-98b1-19ad-edd4a90c6ee5, 'name': SearchDatastore_Task, 'duration_secs': 0.044392} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.565566] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.565872] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776/45644d9d-1d7d-4c2c-825d-fb3a2f6f2776.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 810.566565] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d951509-9b7a-448e-8f0f-0ae7da2589ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.575848] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 810.575848] env[62914]: value = "task-4831878" [ 810.575848] env[62914]: _type = "Task" [ 810.575848] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.586329] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.819207] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.819533] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 810.826024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.180s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.826024] env[62914]: DEBUG nova.objects.instance [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lazy-loading 'resources' on Instance uuid bc6da94e-4de8-4e56-a071-d04c5e5dad18 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.867728] env[62914]: DEBUG oslo_concurrency.lockutils [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] Releasing lock "refresh_cache-45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.868101] env[62914]: DEBUG nova.compute.manager [req-26ed9427-0d90-4c34-ad19-f60b4a8bf19e req-c4e29c0c-20b7-49da-80ef-7c4cfd97c360 service nova] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Received event network-vif-deleted-c2f62cb3-f405-432b-9d8c-8c08ea54e240 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 810.900654] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.900800] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.901137] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.901749] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.901749] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 
tempest-ServerRescueTestJSON-1768795781-project-member] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.906261] env[62914]: INFO nova.compute.manager [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Terminating instance [ 810.909830] env[62914]: DEBUG nova.compute.manager [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 810.910130] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 810.911117] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1d5121-c330-4f78-8c69-008b2fda0cde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.920858] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 810.921271] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05f5e15e-81ae-4ed1-a9ee-9884e8f4ef0d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.931676] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 810.931676] env[62914]: value = "task-4831879" [ 810.931676] env[62914]: _type = "Task" [ 810.931676] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.942734] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831879, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.036685] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89858354-36d9-4ee8-9b27-39786fdacfd8 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.140s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.090433] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831878, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.326961] env[62914]: DEBUG nova.compute.utils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 811.329512] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 811.329870] env[62914]: DEBUG nova.network.neutron [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 811.398915] env[62914]: DEBUG nova.policy [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '958529ce89d049f3ade8733e57d9f841', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adf406f1352240aba2338e64b8f182b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 811.446533] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831879, 'name': PowerOffVM_Task, 'duration_secs': 0.398562} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.446885] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 811.447076] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 811.447570] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b7b2fa6-2a45-4d1e-8ca1-d5d10b44d617 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.528548] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 811.528905] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 811.529012] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Deleting the datastore file [datastore1] 6bdcd778-0942-41e7-a6fb-7c3413d34ef7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.529265] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b20dd7a6-a650-42ef-b4a1-0925a7829643 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.539189] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 811.542299] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for the task: (returnval){ [ 811.542299] env[62914]: value = "task-4831881" [ 811.542299] env[62914]: _type = "Task" [ 811.542299] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.555928] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831881, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.592483] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655083} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.592833] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776/45644d9d-1d7d-4c2c-825d-fb3a2f6f2776.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 811.593042] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.593319] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ceec6a93-8ae9-4450-b0d0-36e017b2efef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.602107] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 811.602107] env[62914]: value = "task-4831882" [ 811.602107] env[62914]: _type = "Task" [ 811.602107] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.618986] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831882, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.838956] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 811.861784] env[62914]: DEBUG nova.network.neutron [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Successfully created port: c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.026840] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3963e8ec-a3b4-4f35-b4a7-94f5b1e5d5c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.037498] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b05cbd-9dd6-4622-84c6-18f14202b7ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.087430] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16a9691-af7e-4a71-8a6c-8d285067bec8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.098168] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.102183] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32875b5d-3164-4306-974e-22bb3bf331e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.110034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.122139] env[62914]: DEBUG nova.compute.provider_tree [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.130052] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831882, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119547} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.130052] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.130052] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984f9736-11ad-440a-9304-0640dba95b75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.153788] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776/45644d9d-1d7d-4c2c-825d-fb3a2f6f2776.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.154531] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f2a9ff3-48c6-445f-a0d8-dd036960edc6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.175349] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 812.175349] env[62914]: value = "task-4831883" [ 812.175349] env[62914]: _type = "Task" [ 812.175349] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.184765] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831883, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.527034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.527146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.527720] env[62914]: DEBUG nova.objects.instance [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'flavor' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 812.588866] env[62914]: DEBUG oslo_vmware.api [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Task: {'id': task-4831881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.905835} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.589209] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 812.589481] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 812.589707] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.589959] env[62914]: INFO nova.compute.manager [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Took 1.68 seconds to destroy the instance on the hypervisor. [ 812.590305] env[62914]: DEBUG oslo.service.loopingcall [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.590586] env[62914]: DEBUG nova.compute.manager [-] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 812.590707] env[62914]: DEBUG nova.network.neutron [-] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.628921] env[62914]: DEBUG nova.scheduler.client.report [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 812.685745] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831883, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.851343] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 812.878083] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 812.878349] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 812.878531] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.878690] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 812.878890] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.878992] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 812.879224] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 812.879394] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 812.879569] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 812.879737] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 812.879914] env[62914]: DEBUG nova.virt.hardware [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 812.880914] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c64cb9-0f31-4535-9abb-92dadb0a9973 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.890393] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b466597e-6a43-4865-b4d7-4edeec311419 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.134975] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.312s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.137517] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.174s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.137772] env[62914]: DEBUG nova.objects.instance [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lazy-loading 'resources' on Instance uuid e6544702-bde7-4056-8a50-adede5c6a9d6 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.154757] env[62914]: DEBUG nova.objects.instance [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'pci_requests' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.159032] env[62914]: INFO nova.scheduler.client.report [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Deleted allocations for instance bc6da94e-4de8-4e56-a071-d04c5e5dad18 [ 813.187831] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 
tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831883, 'name': ReconfigVM_Task, 'duration_secs': 0.691396} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.188813] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776/45644d9d-1d7d-4c2c-825d-fb3a2f6f2776.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.188813] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8be8e23a-a26f-43e9-9b3b-68721ca94cc5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.194341] env[62914]: DEBUG nova.compute.manager [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 813.197664] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36068d8-d93e-4ad0-bd4a-3b5ec9cca657 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.200857] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 813.200857] env[62914]: value = "task-4831884" [ 813.200857] env[62914]: _type = "Task" [ 813.200857] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.215211] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831884, 'name': Rename_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.446436] env[62914]: DEBUG nova.network.neutron [-] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.547579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.547579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.657031] env[62914]: DEBUG nova.objects.base [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Object Instance<2d48056c-d38f-4be1-b28b-71da14607870> lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 813.657456] env[62914]: DEBUG nova.network.neutron [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 813.667291] env[62914]: DEBUG oslo_concurrency.lockutils [None req-25d3a533-cecb-42d5-8e81-bce036b77384 tempest-ServersTestMultiNic-523841259 tempest-ServersTestMultiNic-523841259-project-member] Lock "bc6da94e-4de8-4e56-a071-d04c5e5dad18" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.366s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.690765] env[62914]: DEBUG nova.network.neutron [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Successfully updated port: c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 813.711176] env[62914]: INFO nova.compute.manager [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] instance snapshotting [ 813.717673] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831884, 'name': Rename_Task, 'duration_secs': 0.379438} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.718197] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 813.719374] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d6d3e5-9f1d-45a1-a9f3-329d1074de8b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.727028] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd038964-4497-481f-9aac-7d1a23fca0e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.751449] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b82461-11f5-4f42-b3fb-0c32dfc0860e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.754410] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 813.754410] env[62914]: value = "task-4831885" [ 813.754410] env[62914]: _type = "Task" [ 813.754410] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.773349] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831885, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.776786] env[62914]: DEBUG nova.policy [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 813.948780] env[62914]: INFO nova.compute.manager [-] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Took 1.36 seconds to deallocate network for instance. 
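The entries above repeatedly show the driver waiting on vCenter tasks ("Waiting for the task: (returnval){...}", "progress is N%", "completed successfully"). The following is a hypothetical, simplified sketch of that polling pattern, not the actual oslo.vmware implementation (which drives the poll from a looping call inside the API session); `get_task_info` is an assumed callable standing in for the real property read through the VMware session.

```python
import time

class TaskFailedError(Exception):
    """Raised when a vCenter task finishes in the 'error' state."""

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    `get_task_info` is a hypothetical callable returning the Task managed
    object's `info` property (state, progress, result, error); in the real
    driver this read goes through the VMware API session.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info                      # caller reads info.result
        if info.state == "error":
            raise TaskFailedError(str(info.error))
        # 'queued' or 'running': report progress and poll again, which is
        # what produces the "progress is N%" debug lines in the log.
        print("Task %s progress is %s%%" % (task_ref, info.progress or 0))
        time.sleep(poll_interval)
```

The terminal log line ("Task ... completed successfully" with a `duration_secs` value) corresponds to the loop returning once the task state flips to success.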
[ 814.157339] env[62914]: DEBUG nova.network.neutron [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Successfully created port: 55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.194834] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.194834] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.194834] env[62914]: DEBUG nova.network.neutron [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 814.268064] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831885, 'name': PowerOnVM_Task} progress is 76%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.272018] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f393650c-0829-4147-b689-061a9776a9d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.277373] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 814.278087] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-173035fb-a1d1-459b-8700-148ba1f094fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.281263] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be857ea7-c607-485a-b0ff-0451a1f36bef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.316082] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e7e6e7-413a-4203-9508-08ad9d9bf5cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.319539] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 814.319539] env[62914]: value = "task-4831886" [ 814.319539] env[62914]: _type = "Task" [ 814.319539] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.328122] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7697bad-205e-43c4-8778-21adc5ba89b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.336609] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831886, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.348582] env[62914]: DEBUG nova.compute.provider_tree [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.456819] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.596132] env[62914]: DEBUG nova.compute.manager [req-e645ef8f-b1ed-44c3-aefb-3bf7bb6ef2ca req-d4dcaf58-038a-4285-979d-7963932a3dc6 service nova] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Received event network-vif-deleted-e42f6371-f854-4e39-ae20-c78d59217dbb {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 814.741929] env[62914]: DEBUG nova.network.neutron [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.772149] env[62914]: DEBUG oslo_vmware.api [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831885, 'name': PowerOnVM_Task, 'duration_secs': 0.986813} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.772496] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 814.772704] env[62914]: INFO nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Took 9.22 seconds to spawn the instance on the hypervisor. 
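Another recurring pattern in these entries is the named-lock bookkeeping ("Lock \"compute_resources\" acquired by ... :: waited 46.077s", "\"released\" ... :: held 2.221s"). Below is a hypothetical sketch of a context manager that emits that style of message; the real messages come from oslo_concurrency.lockutils, whose internals may differ.

```python
import contextlib
import threading
import time

_locks = {}
_locks_guard = threading.Lock()

def _get_lock(name):
    # Lazily create one lock per name in this process, loosely mirroring a
    # per-name semaphore registry.
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())

@contextlib.contextmanager
def timed_lock(name, caller):
    """Acquire a named lock and report how long we waited and held it."""
    lock = _get_lock(name)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))

# Usage, e.g. around a resource-claim critical section:
#   with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
#       ...claim host resources...
```

Long "waited" values in the log (tens of seconds on "compute_resources") indicate contention on that shared lock rather than slow work inside the critical section, which is why the "held" times reported afterwards are typically much shorter.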
[ 814.772891] env[62914]: DEBUG nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 814.773736] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b44c85-b187-40c9-a929-06d65e3e9c91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.832913] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831886, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.851613] env[62914]: DEBUG nova.scheduler.client.report [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 814.906996] env[62914]: DEBUG nova.network.neutron [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.941697] env[62914]: DEBUG nova.compute.manager [req-4bd31f85-3211-4ec6-ac3a-3365d3ee35ad req-392ad77c-610d-4c67-a395-591ba734097f service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event 
network-vif-plugged-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 814.941980] env[62914]: DEBUG oslo_concurrency.lockutils [req-4bd31f85-3211-4ec6-ac3a-3365d3ee35ad req-392ad77c-610d-4c67-a395-591ba734097f service nova] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.942164] env[62914]: DEBUG oslo_concurrency.lockutils [req-4bd31f85-3211-4ec6-ac3a-3365d3ee35ad req-392ad77c-610d-4c67-a395-591ba734097f service nova] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.942327] env[62914]: DEBUG oslo_concurrency.lockutils [req-4bd31f85-3211-4ec6-ac3a-3365d3ee35ad req-392ad77c-610d-4c67-a395-591ba734097f service nova] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.942506] env[62914]: DEBUG nova.compute.manager [req-4bd31f85-3211-4ec6-ac3a-3365d3ee35ad req-392ad77c-610d-4c67-a395-591ba734097f service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] No waiting events found dispatching network-vif-plugged-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 814.942672] env[62914]: WARNING nova.compute.manager [req-4bd31f85-3211-4ec6-ac3a-3365d3ee35ad req-392ad77c-610d-4c67-a395-591ba734097f service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received unexpected event network-vif-plugged-c3221de3-00d5-45e7-af68-04297360fbcf for instance with vm_state building and task_state spawning. [ 815.291999] env[62914]: INFO nova.compute.manager [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Took 51.61 seconds to build instance. [ 815.332497] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831886, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.358758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.221s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.361485] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.077s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.363097] env[62914]: INFO nova.compute.claims [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.387175] env[62914]: INFO nova.scheduler.client.report [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Deleted allocations for instance e6544702-bde7-4056-8a50-adede5c6a9d6 [ 815.410061] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.410470] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance network_info: |[{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 815.410990] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None 
req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:e7:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3221de3-00d5-45e7-af68-04297360fbcf', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.421421] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating folder: Project (adf406f1352240aba2338e64b8f182b4). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 815.421912] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60f6e321-8b1b-420b-8a69-76189b6c3c99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.434961] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created folder: Project (adf406f1352240aba2338e64b8f182b4) in parent group-v941773. [ 815.435817] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating folder: Instances. Parent ref: group-v941937. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 815.435817] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c5667ee-8da1-4257-abbc-939d524bf412 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.445548] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created folder: Instances in parent group-v941937. [ 815.445548] env[62914]: DEBUG oslo.service.loopingcall [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.445711] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 815.446280] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4f0a475-ef31-4a47-bc0f-c0445ba2c2ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.467961] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.467961] env[62914]: value = "task-4831889" [ 815.467961] env[62914]: _type = "Task" [ 815.467961] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.476768] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831889, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.794597] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7303ec4b-6e46-4dd2-8fb1-684f0485cc66 tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.712s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.832341] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831886, 'name': CreateSnapshot_Task, 'duration_secs': 1.362143} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.832682] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 815.833525] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25bc08f8-4f3e-4777-9fa6-916e2d0389ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.896503] env[62914]: DEBUG oslo_concurrency.lockutils [None req-775d4075-dc56-4fab-9f36-82b2e9ec9e8e tempest-ServerShowV254Test-1848756284 tempest-ServerShowV254Test-1848756284-project-member] Lock "e6544702-bde7-4056-8a50-adede5c6a9d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.747s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.937193] env[62914]: DEBUG nova.network.neutron [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Successfully updated port: 55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.980935] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831889, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.298201] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 816.356790] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 816.356790] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3ebf0274-b93c-4ccb-a278-3bfd295c4210 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.366392] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 816.366392] env[62914]: value = "task-4831890" [ 816.366392] env[62914]: _type = "Task" [ 816.366392] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.377486] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831890, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.439657] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.439874] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.440100] env[62914]: DEBUG nova.network.neutron [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.485354] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831889, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.818616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.879066] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831890, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.907484] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2c9834-0319-45b9-b9ec-7c1c7313b0ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.915761] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8da197-0ed9-42a0-80e3-fa61740c9a7f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.950379] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a639df39-2201-4d97-a63d-eb9fcb21f118 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.959664] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4923b369-857a-413e-9990-dcd38f1b93b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.976639] env[62914]: DEBUG nova.compute.provider_tree [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.987055] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831889, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.989441] env[62914]: WARNING nova.network.neutron [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] 9be47f79-b984-4fc2-a590-a80f36132ab1 already exists in list: networks containing: ['9be47f79-b984-4fc2-a590-a80f36132ab1']. 
ignoring it [ 817.257528] env[62914]: DEBUG nova.compute.manager [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-vif-plugged-55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 817.257920] env[62914]: DEBUG oslo_concurrency.lockutils [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.258129] env[62914]: DEBUG oslo_concurrency.lockutils [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.258361] env[62914]: DEBUG oslo_concurrency.lockutils [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.258584] env[62914]: DEBUG nova.compute.manager [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] No waiting events found dispatching network-vif-plugged-55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 817.258817] env[62914]: WARNING nova.compute.manager [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received unexpected event network-vif-plugged-55d78b2e-b665-4a1c-84fe-47e02f937395 for instance with vm_state active and task_state None. [ 817.259396] env[62914]: DEBUG nova.compute.manager [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-changed-55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 817.259396] env[62914]: DEBUG nova.compute.manager [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing instance network info cache due to event network-changed-55d78b2e-b665-4a1c-84fe-47e02f937395. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 817.259586] env[62914]: DEBUG oslo_concurrency.lockutils [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.379741] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831890, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.456411] env[62914]: DEBUG nova.network.neutron [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55d78b2e-b665-4a1c-84fe-47e02f937395", "address": "fa:16:3e:2b:6f:44", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55d78b2e-b6", "ovs_interfaceid": "55d78b2e-b665-4a1c-84fe-47e02f937395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 817.482030] env[62914]: DEBUG nova.scheduler.client.report [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.489612] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831889, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.778143] env[62914]: DEBUG nova.compute.manager [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 817.778358] env[62914]: DEBUG nova.compute.manager [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing instance network info cache due to event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 817.778661] env[62914]: DEBUG oslo_concurrency.lockutils [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.778754] env[62914]: DEBUG oslo_concurrency.lockutils [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.778921] env[62914]: DEBUG nova.network.neutron [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 817.879943] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831890, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.959504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.960228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.960400] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.960934] env[62914]: DEBUG oslo_concurrency.lockutils [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.960934] env[62914]: DEBUG nova.network.neutron [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing network info cache for port 55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 817.963330] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4133e11a-85ec-4848-981b-a87c3aafc1a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.985211] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.985518] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.985680] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image 
limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.985870] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.986032] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.986194] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.986410] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.986573] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.986744] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.986914] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.987114] env[62914]: DEBUG nova.virt.hardware [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.993628] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfiguring VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 817.997837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 
2.636s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.998367] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 818.000955] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4144fb64-f60e-4784-a99c-25601c8ae705 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.015934] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.210s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.016325] env[62914]: DEBUG nova.objects.instance [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lazy-loading 'resources' on Instance uuid 4496a977-30b2-4323-a561-884633958cdf {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.027040] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831889, 'name': CreateVM_Task, 'duration_secs': 2.16181} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.029072] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 818.029511] env[62914]: DEBUG oslo_vmware.api [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 818.029511] env[62914]: value = "task-4831891" [ 818.029511] env[62914]: _type = "Task" [ 818.029511] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.030648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.030648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.031095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.031732] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76df9070-3f01-4327-88a5-71134ad23bf2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.042461] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 818.042461] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d58451-999d-dcaf-317a-9a1e3a0a20fb" [ 818.042461] env[62914]: _type = "Task" [ 818.042461] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.045415] env[62914]: DEBUG oslo_vmware.api [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831891, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.055309] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d58451-999d-dcaf-317a-9a1e3a0a20fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.381349] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831890, 'name': CloneVM_Task, 'duration_secs': 1.562973} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.381684] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Created linked-clone VM from snapshot [ 818.382507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc14928f-9ab1-4176-9239-416061b2c8c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.394840] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Uploading image 8d584922-9f5f-403e-b8e7-e412d68ca5ee {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 818.424567] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 818.424567] env[62914]: value = "vm-941941" [ 818.424567] env[62914]: _type = "VirtualMachine" [ 818.424567] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 818.424902] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5d9e979d-5c2e-4184-a3a6-c79b087a7276 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.431199] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.431509] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.431700] env[62914]: DEBUG nova.compute.manager [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 818.432614] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f68ef70-0d4f-4dc1-ad92-618ba880a84f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.436820] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease: (returnval){ [ 818.436820] env[62914]: value = 
"session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e928c-bef7-418a-03cd-481638f3a33b" [ 818.436820] env[62914]: _type = "HttpNfcLease" [ 818.436820] env[62914]: } obtained for exporting VM: (result){ [ 818.436820] env[62914]: value = "vm-941941" [ 818.436820] env[62914]: _type = "VirtualMachine" [ 818.436820] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 818.437084] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the lease: (returnval){ [ 818.437084] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e928c-bef7-418a-03cd-481638f3a33b" [ 818.437084] env[62914]: _type = "HttpNfcLease" [ 818.437084] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 818.443802] env[62914]: DEBUG nova.compute.manager [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 818.447290] env[62914]: DEBUG nova.objects.instance [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lazy-loading 'flavor' on Instance uuid 1fb67ac1-c0b7-48b9-8562-d457d46709bc {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.452097] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 818.452097] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e928c-bef7-418a-03cd-481638f3a33b" [ 818.452097] env[62914]: _type = "HttpNfcLease" [ 818.452097] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 818.517366] env[62914]: DEBUG nova.compute.utils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 818.519389] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 818.519510] env[62914]: DEBUG nova.network.neutron [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 818.545275] env[62914]: DEBUG oslo_vmware.api [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831891, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.565610] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d58451-999d-dcaf-317a-9a1e3a0a20fb, 'name': SearchDatastore_Task, 'duration_secs': 0.019042} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.565693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.565980] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.566284] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.566451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.566671] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 818.567011] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-350a1199-8c64-45a3-85e7-10390c2862f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.584317] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 818.586203] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 818.586728] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-982d9858-8f12-4c6b-a51a-8e6ee21c0555 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.597061] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 818.597061] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52774102-b1aa-877d-fa3e-49a6f21c3584" [ 818.597061] env[62914]: _type = "Task" [ 818.597061] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.607644] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52774102-b1aa-877d-fa3e-49a6f21c3584, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.659594] env[62914]: DEBUG nova.policy [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80cb6590db754eee833dbae943586e8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15573bba5e5448498fde03c18c64f4e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 818.760737] env[62914]: DEBUG nova.network.neutron [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updated VIF entry in instance network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 818.761120] env[62914]: DEBUG nova.network.neutron [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.946301] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 818.946301] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e928c-bef7-418a-03cd-481638f3a33b" [ 818.946301] env[62914]: _type = "HttpNfcLease" [ 818.946301] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 818.949005] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 818.949005] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e928c-bef7-418a-03cd-481638f3a33b" [ 818.949005] env[62914]: _type = "HttpNfcLease" [ 818.949005] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 818.952843] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2c26e7-19b0-4151-aec9-20e0983e524c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.959018] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 818.959018] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3821568-bdbc-41f8-8aff-12859eeb030c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.964880] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52234f5e-48f6-3ce8-786f-2744996cf83b/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 818.965100] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52234f5e-48f6-3ce8-786f-2744996cf83b/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 818.970600] env[62914]: DEBUG oslo_vmware.api [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 818.970600] env[62914]: value = "task-4831893" [ 818.970600] env[62914]: _type = "Task" [ 818.970600] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.030945] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 819.035389] env[62914]: DEBUG nova.network.neutron [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updated VIF entry in instance network info cache for port 55d78b2e-b665-4a1c-84fe-47e02f937395. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 819.035724] env[62914]: DEBUG nova.network.neutron [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55d78b2e-b665-4a1c-84fe-47e02f937395", "address": "fa:16:3e:2b:6f:44", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55d78b2e-b6", "ovs_interfaceid": "55d78b2e-b665-4a1c-84fe-47e02f937395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.047616] env[62914]: DEBUG oslo_vmware.api [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831893, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.052605] env[62914]: DEBUG oslo_vmware.api [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831891, 'name': ReconfigVM_Task, 'duration_secs': 0.74244} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.053849] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.053849] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfigured VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 819.067702] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-efc23cdd-7ace-440c-86e9-9a2a2658b05a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.112513] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52774102-b1aa-877d-fa3e-49a6f21c3584, 'name': SearchDatastore_Task, 'duration_secs': 0.024498} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.119891] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b640b181-0d65-42ea-9644-5fe36c4ac539 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.126949] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 819.126949] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fc8517-6493-2874-5b75-2f8c213bbb2c" [ 819.126949] env[62914]: _type = "Task" [ 819.126949] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.135980] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fc8517-6493-2874-5b75-2f8c213bbb2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.237033] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f93760-6e45-49f5-bda3-025651cd4e76 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.246345] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6129ef52-f750-4e1e-886b-4f0edb03edae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.281648] env[62914]: DEBUG oslo_concurrency.lockutils [req-fb0945c4-7e71-43b1-909d-5294c7f0f401 req-81c344de-4779-4bae-b2a7-6576b2ed1707 service nova] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.283142] env[62914]: DEBUG nova.network.neutron [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Successfully created port: fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.285741] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2478c1-caa9-4b31-b65a-f514622bfb95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.298238] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c001a67-b481-460d-922e-ba7f0884060e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.319934] env[62914]: DEBUG nova.compute.provider_tree [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.484388] env[62914]: DEBUG oslo_vmware.api [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831893, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.540618] env[62914]: DEBUG oslo_concurrency.lockutils [req-ad05aef7-fd90-426e-991b-3ed696a1ec0a req-65826ba4-58cf-4bd7-9fec-6c91406c6103 service nova] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.564165] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c5f9e8de-5f34-4fee-8040-bd38f96f3099 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.037s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.641974] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fc8517-6493-2874-5b75-2f8c213bbb2c, 'name': SearchDatastore_Task, 'duration_secs': 0.013599} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.642551] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.645861] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 819.645861] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfc0b6f3-f44e-4e1a-94d0-b102f5c1b40e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.651268] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 819.651268] env[62914]: value = "task-4831894" [ 819.651268] env[62914]: _type = "Task" [ 819.651268] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.660911] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831894, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.824081] env[62914]: DEBUG nova.scheduler.client.report [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.985838] env[62914]: DEBUG oslo_vmware.api [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831893, 'name': PowerOffVM_Task, 'duration_secs': 0.931967} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.988017] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 819.988017] env[62914]: DEBUG nova.compute.manager [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 819.988017] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d23b10-924f-494c-8bf6-7640dd6fbe30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.050518] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 820.086086] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.087032] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.087032] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.087129] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.087301] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.087542] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.087970] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.088477] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.088477] env[62914]: DEBUG nova.virt.hardware [None 
req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.088597] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.088784] env[62914]: DEBUG nova.virt.hardware [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.089813] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8ca1dd-3b09-4c0b-a411-acf7666d40fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.101567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc97b744-9f31-4e85-ba98-c780f576b118 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.163320] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831894, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.331270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.315s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.334091] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.601s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.334422] env[62914]: DEBUG nova.objects.instance [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'resources' on Instance uuid 968cbfbe-1570-48d6-890d-c7a680855574 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.365395] env[62914]: INFO nova.scheduler.client.report [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Deleted allocations for instance 4496a977-30b2-4323-a561-884633958cdf [ 820.503039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cb003151-809f-4c78-8c0d-f77bf92ab9b0 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.071s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.665780] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.865238} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.666144] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 820.666444] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.666745] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70a3f029-b000-4f1b-a9a3-8e8e328b8cbf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.677873] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 820.677873] env[62914]: value = "task-4831895" [ 820.677873] env[62914]: _type = "Task" [ 820.677873] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.689437] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.753185] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.753185] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.753381] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.753578] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.753750] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.756117] env[62914]: INFO nova.compute.manager [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Terminating instance [ 820.758585] env[62914]: DEBUG nova.compute.manager [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 820.758801] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 820.759680] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7eb7c1-9aa2-4c86-b259-71329198bf72 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.769192] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 820.769523] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-302285b8-6f02-48ca-bc2e-728c6895516a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.778414] env[62914]: DEBUG oslo_vmware.api [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 820.778414] env[62914]: value = "task-4831896" [ 820.778414] env[62914]: _type = "Task" [ 820.778414] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.788276] env[62914]: DEBUG oslo_vmware.api [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.875737] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1ff635a8-2d7e-46d4-8f9b-e7243e718d6e tempest-ServersTestJSON-1417391248 tempest-ServersTestJSON-1417391248-project-member] Lock "4496a977-30b2-4323-a561-884633958cdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.797s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.174690] env[62914]: DEBUG nova.network.neutron [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Successfully updated port: fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.195362] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.201042} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.195500] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.196454] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c8145c-634d-4244-9429-be1b3790380e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.223090] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.226837] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdeb00c6-f308-4747-b319-c96cd3c39be3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.248619] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 821.248619] env[62914]: value = "task-4831897" [ 821.248619] env[62914]: _type = "Task" [ 821.248619] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.262787] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831897, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.294078] env[62914]: DEBUG oslo_vmware.api [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831896, 'name': PowerOffVM_Task, 'duration_secs': 0.321253} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.294402] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 821.294579] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 821.294847] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c70d3d86-414e-40fb-8d9c-66ede91e7c6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.366250] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 821.366496] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 821.367025] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Deleting the datastore file [datastore1] 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.367025] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-880950cb-c7f1-4c4a-951a-87248b5efaf9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.374312] env[62914]: DEBUG oslo_vmware.api [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for the task: (returnval){ [ 821.374312] env[62914]: value = "task-4831899" [ 821.374312] env[62914]: _type = "Task" [ 821.374312] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.383559] env[62914]: DEBUG oslo_vmware.api [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831899, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.500218] env[62914]: DEBUG nova.compute.manager [req-f8e297ab-e40f-4ba9-b7f6-9c75d5a12551 req-f6fcd608-96f0-42da-aaa2-a55b76fff996 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Received event network-vif-plugged-fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 821.500488] env[62914]: DEBUG oslo_concurrency.lockutils [req-f8e297ab-e40f-4ba9-b7f6-9c75d5a12551 req-f6fcd608-96f0-42da-aaa2-a55b76fff996 service nova] Acquiring lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.501016] env[62914]: DEBUG oslo_concurrency.lockutils [req-f8e297ab-e40f-4ba9-b7f6-9c75d5a12551 req-f6fcd608-96f0-42da-aaa2-a55b76fff996 service nova] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.501016] env[62914]: DEBUG oslo_concurrency.lockutils [req-f8e297ab-e40f-4ba9-b7f6-9c75d5a12551 req-f6fcd608-96f0-42da-aaa2-a55b76fff996 service nova] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.501217] env[62914]: DEBUG nova.compute.manager [req-f8e297ab-e40f-4ba9-b7f6-9c75d5a12551 req-f6fcd608-96f0-42da-aaa2-a55b76fff996 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] No waiting events found dispatching network-vif-plugged-fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 821.501449] env[62914]: WARNING nova.compute.manager [req-f8e297ab-e40f-4ba9-b7f6-9c75d5a12551 req-f6fcd608-96f0-42da-aaa2-a55b76fff996 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Received unexpected event network-vif-plugged-fc0ce838-2709-475a-8c53-7dce16800b0d for instance with vm_state building and task_state spawning. 
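The PowerOffVM_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same shape: the driver invokes a vSphere method through the oslo.vmware session, gets back a Task managed object, and wait_for_task then polls it (the recurring "_poll_task ... progress is N%" lines) until it completes. A minimal sketch of that call pattern follows; it is illustrative only, not Nova's own code, and the host, credentials and VM reference are placeholders rather than values taken from this log.

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials (assumptions).
    # api_retry_count controls fault retries; task_poll_interval is the
    # delay between the progress polls seen in the log.
    session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # Assumed to be a VirtualMachine managed-object reference resolved
    # earlier, e.g. via the PropertyCollector queries shown above.
    vm_ref = ...

    # invoke_api() issues the SOAP request (the "Invoking
    # VirtualMachine.PowerOffVM_Task" lines) and returns the task
    # reference; wait_for_task() blocks, polling progress until the task
    # reaches the success state or raising an exception translated from
    # the task's error information.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The same invoke-then-wait pairing is behind the ReconfigVM_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task and CreateVM_Task sequences later in this run.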
[ 821.627949] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed42213-97b8-4c04-b52c-7c1cfe7e83fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.643943] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97772444-cf59-4461-856d-fe8364c5094e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.689019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "refresh_cache-1ec89a28-d4f3-4324-bf14-c99c5ce05950" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.689019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquired lock "refresh_cache-1ec89a28-d4f3-4324-bf14-c99c5ce05950" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.689172] env[62914]: DEBUG nova.network.neutron [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.692144] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963e80b9-3732-485d-b5f3-d045d249ed4d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.702757] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04f3662-9dcf-4393-9884-ba85dc08a348 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.721012] env[62914]: DEBUG nova.compute.provider_tree [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.759939] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831897, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.885426] env[62914]: DEBUG oslo_vmware.api [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Task: {'id': task-4831899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.394466} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.886042] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.889323] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 821.889323] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 821.889323] env[62914]: INFO nova.compute.manager [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Took 1.13 seconds to destroy the instance on the hypervisor. [ 821.889323] env[62914]: DEBUG oslo.service.loopingcall [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.889323] env[62914]: DEBUG nova.compute.manager [-] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 821.889323] env[62914]: DEBUG nova.network.neutron [-] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 822.224565] env[62914]: DEBUG nova.scheduler.client.report [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 822.263901] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831897, 'name': ReconfigVM_Task, 'duration_secs': 0.549679} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.264497] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfigured VM instance instance-00000038 to attach disk [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.265159] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7b6e165-620b-4359-ae43-e40408f052a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.267805] env[62914]: DEBUG nova.network.neutron [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.276305] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 822.276305] env[62914]: value = "task-4831900" [ 822.276305] env[62914]: _type = "Task" [ 822.276305] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.286453] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831900, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.510633] env[62914]: DEBUG nova.objects.instance [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lazy-loading 'flavor' on Instance uuid 1fb67ac1-c0b7-48b9-8562-d457d46709bc {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.733418] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.399s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.736647] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.822s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.738882] env[62914]: INFO nova.compute.claims [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.745109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-0acbfa32-adde-4a6e-bfb6-c745a0b8524a" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.745109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-0acbfa32-adde-4a6e-bfb6-c745a0b8524a" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.745109] env[62914]: DEBUG nova.objects.instance [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'flavor' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.777102] env[62914]: INFO nova.scheduler.client.report [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocations for instance 968cbfbe-1570-48d6-890d-c7a680855574 [ 822.790907] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831900, 'name': Rename_Task, 'duration_secs': 0.249423} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.791218] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 822.791474] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f112dbb8-7c98-4b5e-8ef1-c720d56b7de7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.800164] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 822.800164] env[62914]: value = "task-4831901" [ 822.800164] env[62914]: _type = "Task" [ 822.800164] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.811416] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.885986] env[62914]: DEBUG nova.network.neutron [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Updating instance_info_cache with network_info: [{"id": "fc0ce838-2709-475a-8c53-7dce16800b0d", "address": "fa:16:3e:5b:12:61", "network": {"id": "1042a3e1-2e1e-4838-9a35-4fb30e62c033", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1900110279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15573bba5e5448498fde03c18c64f4e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc0ce838-27", "ovs_interfaceid": "fc0ce838-2709-475a-8c53-7dce16800b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.019313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.019503] 
env[62914]: DEBUG oslo_concurrency.lockutils [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquired lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.019678] env[62914]: DEBUG nova.network.neutron [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 823.019858] env[62914]: DEBUG nova.objects.instance [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lazy-loading 'info_cache' on Instance uuid 1fb67ac1-c0b7-48b9-8562-d457d46709bc {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.117097] env[62914]: DEBUG nova.network.neutron [-] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.291908] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ba488a51-1e68-4472-8d5e-38fc7450ac08 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "968cbfbe-1570-48d6-890d-c7a680855574" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.596s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.313708] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831901, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.388425] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Releasing lock "refresh_cache-1ec89a28-d4f3-4324-bf14-c99c5ce05950" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.388842] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Instance network_info: |[{"id": "fc0ce838-2709-475a-8c53-7dce16800b0d", "address": "fa:16:3e:5b:12:61", "network": {"id": "1042a3e1-2e1e-4838-9a35-4fb30e62c033", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1900110279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15573bba5e5448498fde03c18c64f4e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc0ce838-27", "ovs_interfaceid": "fc0ce838-2709-475a-8c53-7dce16800b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 823.389370] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:12:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc0ce838-2709-475a-8c53-7dce16800b0d', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.399140] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Creating folder: Project (15573bba5e5448498fde03c18c64f4e4). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 823.399508] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ccf223d-e1c9-4f5a-b06a-63f468d64b51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.411926] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Created folder: Project (15573bba5e5448498fde03c18c64f4e4) in parent group-v941773. [ 823.412942] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Creating folder: Instances. Parent ref: group-v941942. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 823.412942] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0145dbd6-8ecb-42f6-b016-f15311592862 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.425277] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Created folder: Instances in parent group-v941942. [ 823.425564] env[62914]: DEBUG oslo.service.loopingcall [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 823.425782] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 823.428627] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0757190-792b-469d-87fb-67a6a31b22a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.454520] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.454520] env[62914]: value = "task-4831904" [ 823.454520] env[62914]: _type = "Task" [ 823.454520] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.464381] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831904, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.526459] env[62914]: DEBUG nova.objects.base [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Object Instance<1fb67ac1-c0b7-48b9-8562-d457d46709bc> lazy-loaded attributes: flavor,info_cache {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 823.551855] env[62914]: DEBUG nova.objects.instance [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'pci_requests' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.625506] env[62914]: INFO nova.compute.manager [-] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Took 1.74 seconds to deallocate network for instance. [ 823.817189] env[62914]: DEBUG oslo_vmware.api [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831901, 'name': PowerOnVM_Task, 'duration_secs': 0.862147} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.817432] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 823.817725] env[62914]: INFO nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Took 10.97 seconds to spawn the instance on the hypervisor. [ 823.817863] env[62914]: DEBUG nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 823.818836] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec15234-9abb-4e35-aea7-162610705e5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.972569] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831904, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.054652] env[62914]: DEBUG nova.objects.base [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Object Instance<2d48056c-d38f-4be1-b28b-71da14607870> lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 824.055559] env[62914]: DEBUG nova.network.neutron [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 824.140537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.238330] env[62914]: DEBUG nova.policy [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 824.349474] env[62914]: INFO nova.compute.manager [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Took 57.50 seconds to build instance. [ 824.473616] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831904, 'name': CreateVM_Task, 'duration_secs': 0.74919} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.473788] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 824.474989] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.478102] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.478102] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 824.478102] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2298f50-bca5-4cd2-b3f7-00b64cabcda2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.485438] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 824.485438] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c9381d-479d-dd06-95b8-5aeca4bdfe6e" [ 824.485438] env[62914]: _type = "Task" [ 824.485438] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.496170] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5766c1fe-2ba2-4e15-8499-048577f2b79d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.502321] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c9381d-479d-dd06-95b8-5aeca4bdfe6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.509031] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3403f474-26c2-46b2-89d1-094c05698037 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.547409] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588731bb-33fd-4c18-9f87-0e40985012e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.558199] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a129735-a6d6-46ff-abef-5131d0e67be5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.574856] env[62914]: DEBUG nova.compute.provider_tree [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.712453] env[62914]: DEBUG nova.network.neutron [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Updating instance_info_cache with network_info: [{"id": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "address": "fa:16:3e:f0:25:3b", "network": {"id": "f14f887e-cc3f-4b7b-8cb7-59ef0df9bfca", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-468263138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "319610053c8a4ca19dcb0c0b3e6b6596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458d38ce-bc", "ovs_interfaceid": "458d38ce-bc0b-471c-a588-9d31e99cbe74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.847562] env[62914]: DEBUG nova.compute.manager [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Received event network-changed-fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 824.848605] env[62914]: DEBUG nova.compute.manager [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Refreshing instance network info cache due to event network-changed-fc0ce838-2709-475a-8c53-7dce16800b0d. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 824.849208] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] Acquiring lock "refresh_cache-1ec89a28-d4f3-4324-bf14-c99c5ce05950" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.849208] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] Acquired lock "refresh_cache-1ec89a28-d4f3-4324-bf14-c99c5ce05950" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.849396] env[62914]: DEBUG nova.network.neutron [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Refreshing network info cache for port fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 824.851580] env[62914]: DEBUG oslo_concurrency.lockutils [None req-01a208ef-5148-483c-bbf6-de294373a9f7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.269s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.002668] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c9381d-479d-dd06-95b8-5aeca4bdfe6e, 'name': SearchDatastore_Task, 'duration_secs': 0.022819} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.004839] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.005316] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.005755] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.005936] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.006175] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.006631] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd4f0b0a-8207-4d2b-97b0-cdbf144f3e84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.023136] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.023501] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 825.025227] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6fe2b8-f7b9-4361-91a8-86995eb25eb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.035298] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 825.035298] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5271e07e-0f42-224a-72c2-9c3edacb07b8" [ 825.035298] env[62914]: _type = "Task" [ 825.035298] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.048440] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5271e07e-0f42-224a-72c2-9c3edacb07b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.080134] env[62914]: DEBUG nova.scheduler.client.report [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.215452] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Releasing lock "refresh_cache-1fb67ac1-c0b7-48b9-8562-d457d46709bc" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.228952] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.229573] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.355856] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 
tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 825.549842] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5271e07e-0f42-224a-72c2-9c3edacb07b8, 'name': SearchDatastore_Task, 'duration_secs': 0.024868} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.551093] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d22b91d0-9b3e-4630-997b-504b26778731 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.560664] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 825.560664] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ddf56-f251-0411-7130-72507cdc5ff6" [ 825.560664] env[62914]: _type = "Task" [ 825.560664] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.570514] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ddf56-f251-0411-7130-72507cdc5ff6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.586784] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.849s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.586784] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 825.591827] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.955s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.591827] env[62914]: INFO nova.compute.claims [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.724277] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 825.724277] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a578894-a003-43cd-b8b0-73115f507581 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.733211] env[62914]: DEBUG nova.network.neutron [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Updated VIF entry in instance network info cache for port fc0ce838-2709-475a-8c53-7dce16800b0d. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 825.733211] env[62914]: DEBUG nova.network.neutron [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Updating instance_info_cache with network_info: [{"id": "fc0ce838-2709-475a-8c53-7dce16800b0d", "address": "fa:16:3e:5b:12:61", "network": {"id": "1042a3e1-2e1e-4838-9a35-4fb30e62c033", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1900110279-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15573bba5e5448498fde03c18c64f4e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc0ce838-27", "ovs_interfaceid": "fc0ce838-2709-475a-8c53-7dce16800b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.740946] env[62914]: DEBUG oslo_vmware.api [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] 
Waiting for the task: (returnval){ [ 825.740946] env[62914]: value = "task-4831905" [ 825.740946] env[62914]: _type = "Task" [ 825.740946] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.757544] env[62914]: DEBUG oslo_vmware.api [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831905, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.886735] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.069229] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ddf56-f251-0411-7130-72507cdc5ff6, 'name': SearchDatastore_Task, 'duration_secs': 0.02134} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.069675] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.069954] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 1ec89a28-d4f3-4324-bf14-c99c5ce05950/1ec89a28-d4f3-4324-bf14-c99c5ce05950.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 826.070422] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7caf39d-13c1-471e-ae35-8dda094171bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.079526] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 826.079526] env[62914]: value = "task-4831906" [ 826.079526] env[62914]: _type = "Task" [ 826.079526] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.088426] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831906, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.097029] env[62914]: DEBUG nova.compute.utils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 826.100906] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 826.101058] env[62914]: DEBUG nova.network.neutron [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 826.179926] env[62914]: DEBUG nova.policy [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fb02b5af55441b0b788b739fc8dc623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5adc4dc554ed4fe69f214161fd8ab9b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 826.235828] env[62914]: DEBUG oslo_concurrency.lockutils [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] Releasing lock "refresh_cache-1ec89a28-d4f3-4324-bf14-c99c5ce05950" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.236150] env[62914]: DEBUG nova.compute.manager [req-a8409968-940b-4967-8761-1fe0ec3580da req-1b4de7d4-ed5b-4d34-8e36-0e5ef0292ab1 service nova] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Received event network-vif-deleted-3d606e37-edb2-4b01-b58d-acec974dda62 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 826.253564] env[62914]: DEBUG oslo_vmware.api [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831905, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.594020] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831906, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.603331] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 826.608853] env[62914]: DEBUG nova.network.neutron [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Successfully updated port: 0acbfa32-adde-4a6e-bfb6-c745a0b8524a {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.709500] env[62914]: DEBUG nova.network.neutron [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Successfully created port: 4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.756795] env[62914]: DEBUG oslo_vmware.api [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831905, 'name': PowerOnVM_Task, 'duration_secs': 0.655647} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.759925] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 826.760420] env[62914]: DEBUG nova.compute.manager [None req-e07e4d5e-4840-4b49-953c-bf0e7a57e0e4 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 826.761810] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c77382-1092-4151-a1ca-30213e4c32c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.094695] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831906, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.953379} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.095190] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 1ec89a28-d4f3-4324-bf14-c99c5ce05950/1ec89a28-d4f3-4324-bf14-c99c5ce05950.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 827.095190] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.095596] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ed1020d-6cdb-4bd5-8699-0ace8437304e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.108225] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 827.108225] env[62914]: value = "task-4831907" [ 827.108225] env[62914]: _type = "Task" [ 827.108225] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.123142] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.123602] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.124067] env[62914]: DEBUG nova.network.neutron [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 827.136340] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831907, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.251948] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8472216d-c182-4caf-a29b-08b0536fefc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.262533] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f381e4-0ca5-4a71-a909-b8d49f110544 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.305185] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee61d28-0d3a-465e-8e02-2a12522acb50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.318078] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372c45c2-3b93-4724-9b77-b266dfc24d6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.337250] env[62914]: DEBUG nova.compute.provider_tree [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.621454] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.250158} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.621878] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.622720] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971c68d7-4935-43db-b996-df941c1b712a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.626954] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 827.659834] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 1ec89a28-d4f3-4324-bf14-c99c5ce05950/1ec89a28-d4f3-4324-bf14-c99c5ce05950.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.660171] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be2e68a0-1efc-4e20-9e5b-b923b2e98e27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.694644] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 827.694644] env[62914]: value = "task-4831908" [ 827.694644] env[62914]: _type = "Task" [ 827.694644] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.697904] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.698807] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.698807] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.698943] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.699269] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image pref 0:0:0 {{(pid=62914) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.699685] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.700239] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.700717] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.700895] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.701531] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.701531] env[62914]: DEBUG nova.virt.hardware [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.703395] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f877d6-a229-4cd3-9b16-84abac5ac4f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.724682] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4135451e-2ec8-4229-b862-f1e643cc32cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.732873] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.822654] env[62914]: WARNING nova.network.neutron [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] 9be47f79-b984-4fc2-a590-a80f36132ab1 already exists in list: networks containing: ['9be47f79-b984-4fc2-a590-a80f36132ab1']. 
ignoring it [ 827.822984] env[62914]: WARNING nova.network.neutron [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] 9be47f79-b984-4fc2-a590-a80f36132ab1 already exists in list: networks containing: ['9be47f79-b984-4fc2-a590-a80f36132ab1']. ignoring it [ 827.842063] env[62914]: DEBUG nova.scheduler.client.report [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 828.096414] env[62914]: DEBUG nova.compute.manager [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-vif-plugged-0acbfa32-adde-4a6e-bfb6-c745a0b8524a {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 828.096756] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.096997] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.097226] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.097412] env[62914]: DEBUG nova.compute.manager [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] No waiting events found dispatching network-vif-plugged-0acbfa32-adde-4a6e-bfb6-c745a0b8524a {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 828.097629] env[62914]: WARNING nova.compute.manager [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received unexpected event network-vif-plugged-0acbfa32-adde-4a6e-bfb6-c745a0b8524a for instance with vm_state active and task_state None. 
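
Note: the recurring 'Acquiring lock ... by ...', 'Lock ... acquired ... :: waited Ns' and 'Lock ... "released" ... :: held Ns' DEBUG lines in this log are emitted by oslo.concurrency's lockutils helpers. The sketch below is only an illustration of that usage pattern; the function names and bodies are hypothetical placeholders, not code taken from Nova, though the lock-name strings mirror the ones seen above.

    from oslo_concurrency import lockutils

    # Context-manager form, comparable to the per-instance
    # "refresh_cache-<uuid>" locks acquired and released in this log.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder: rebuild the instance network info cache

    # Decorator form, comparable to the "compute_resources" lock; the
    # "acquired by"/"released by" messages are tagged with the name of
    # the function the decorator wraps.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        pass  # placeholder: resource-claim work guarded by the lock
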
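Note: the repeated 'Waiting for the task: (returnval){ value = "task-..." }' and 'Task: {...} progress is N% / completed successfully' lines come from oslo.vmware's task polling (wait_for_task / _poll_task). A minimal sketch of that pattern follows, using oslo.vmware session calls that exist (invoke_api, wait_for_task) but with a hypothetical helper name and an externally supplied session and VM reference.

    def power_on_vm(session, vm_ref):
        """Invoke a vCenter *_Task method and block until it finishes.

        `session` is an oslo_vmware.api.VMwareAPISession; `vm_ref` is a
        VirtualMachine managed-object reference obtained elsewhere.
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task on the session's task_poll_interval,
        # logging progress lines like the ones above, and returns the task
        # info once vCenter reports success (or raises if the task fails).
        return session.wait_for_task(task)
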
[ 828.097884] env[62914]: DEBUG nova.compute.manager [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-changed-0acbfa32-adde-4a6e-bfb6-c745a0b8524a {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 828.098092] env[62914]: DEBUG nova.compute.manager [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing instance network info cache due to event network-changed-0acbfa32-adde-4a6e-bfb6-c745a0b8524a. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 828.098369] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.201410] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52234f5e-48f6-3ce8-786f-2744996cf83b/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 828.202452] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61e8824-453c-4994-95a9-ae3282b9f28a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.212946] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52234f5e-48f6-3ce8-786f-2744996cf83b/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 828.213198] env[62914]: ERROR oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52234f5e-48f6-3ce8-786f-2744996cf83b/disk-0.vmdk due to incomplete transfer. [ 828.217133] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5530af0a-8b55-4e75-a8a3-c1c21f577b7e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.219542] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.226644] env[62914]: DEBUG oslo_vmware.rw_handles [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52234f5e-48f6-3ce8-786f-2744996cf83b/disk-0.vmdk. 
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 828.226870] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Uploaded image 8d584922-9f5f-403e-b8e7-e412d68ca5ee to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 828.229448] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 828.230142] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-657692ce-cf33-4d31-b5c5-7f9cbe8061fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.237408] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 828.237408] env[62914]: value = "task-4831909" [ 828.237408] env[62914]: _type = "Task" [ 828.237408] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.253191] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831909, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.349838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.760s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.350635] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 828.354985] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.026s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.357469] env[62914]: INFO nova.compute.claims [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.656355] env[62914]: DEBUG nova.network.neutron [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55d78b2e-b665-4a1c-84fe-47e02f937395", "address": "fa:16:3e:2b:6f:44", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55d78b2e-b6", "ovs_interfaceid": "55d78b2e-b665-4a1c-84fe-47e02f937395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}, {"id": "0acbfa32-adde-4a6e-bfb6-c745a0b8524a", "address": "fa:16:3e:72:6c:64", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0acbfa32-ad", "ovs_interfaceid": "0acbfa32-adde-4a6e-bfb6-c745a0b8524a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.727646] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.731030] env[62914]: DEBUG nova.network.neutron [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Successfully updated port: 4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.749766] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831909, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.868995] env[62914]: DEBUG nova.compute.utils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.870934] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 828.871884] env[62914]: DEBUG nova.network.neutron [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 828.913417] env[62914]: DEBUG nova.policy [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fb02b5af55441b0b788b739fc8dc623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5adc4dc554ed4fe69f214161fd8ab9b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 829.161738] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.162700] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.162998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.163447] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.163776] env[62914]: DEBUG nova.network.neutron [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Refreshing network info cache for port 0acbfa32-adde-4a6e-bfb6-c745a0b8524a {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 829.172054] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c92b9b-f983-492a-a316-7b71f74f32a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.176535] env[62914]: DEBUG nova.network.neutron [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Successfully created port: 
b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.194785] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.197743] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.198117] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.198273] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.198439] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.198598] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.198823] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.198991] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.199193] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.199362] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.199540] env[62914]: DEBUG nova.virt.hardware [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.206535] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfiguring VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 829.207058] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c194c75-aa84-4a14-af51-fa302bf54c3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.231409] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.232045] env[62914]: DEBUG oslo_vmware.api [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 829.232045] env[62914]: value = "task-4831910" [ 829.232045] env[62914]: _type = "Task" [ 829.232045] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.236566] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "refresh_cache-10102941-c31a-4ab1-be5a-801520d49fd7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.236566] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "refresh_cache-10102941-c31a-4ab1-be5a-801520d49fd7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.236566] env[62914]: DEBUG nova.network.neutron [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 829.245847] env[62914]: DEBUG oslo_vmware.api [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831910, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.254597] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831909, 'name': Destroy_Task, 'duration_secs': 0.962153} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.254941] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Destroyed the VM [ 829.255222] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 829.255514] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-adf41bfa-4cf3-421b-877b-e89d00d88d28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.263610] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 829.263610] env[62914]: value = "task-4831911" [ 829.263610] env[62914]: _type = "Task" [ 829.263610] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.273786] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831911, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.375707] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 829.704090] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.704090] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.737657] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831908, 'name': ReconfigVM_Task, 'duration_secs': 1.91953} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.741509] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 1ec89a28-d4f3-4324-bf14-c99c5ce05950/1ec89a28-d4f3-4324-bf14-c99c5ce05950.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.742302] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60ae1152-b86c-4412-9c65-66bf38e4e705 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.754673] env[62914]: DEBUG oslo_vmware.api [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831910, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.759484] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 829.759484] env[62914]: value = "task-4831912" [ 829.759484] env[62914]: _type = "Task" [ 829.759484] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.776176] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831912, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.783217] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831911, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.818650] env[62914]: DEBUG nova.network.neutron [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.951067] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b5f4c5-6a07-46e9-b8a7-1d0a4682bc6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.964816] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7322ef-a952-4eef-8a1c-e2e43ddaf9be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.007958] env[62914]: DEBUG nova.network.neutron [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updated VIF entry in instance network info cache for port 0acbfa32-adde-4a6e-bfb6-c745a0b8524a. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 830.008568] env[62914]: DEBUG nova.network.neutron [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55d78b2e-b665-4a1c-84fe-47e02f937395", "address": "fa:16:3e:2b:6f:44", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55d78b2e-b6", "ovs_interfaceid": "55d78b2e-b665-4a1c-84fe-47e02f937395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0acbfa32-adde-4a6e-bfb6-c745a0b8524a", "address": "fa:16:3e:72:6c:64", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0acbfa32-ad", "ovs_interfaceid": "0acbfa32-adde-4a6e-bfb6-c745a0b8524a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.013329] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6883f2-0392-40ee-84fa-c2e1468ce3b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.023340] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80799ed5-2950-40ed-b05d-821a0c041e17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.040938] env[62914]: DEBUG nova.compute.provider_tree [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.071998] env[62914]: DEBUG nova.network.neutron [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Updating instance_info_cache with network_info: [{"id": "4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2", "address": "fa:16:3e:71:a1:52", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a22be80-3c", "ovs_interfaceid": "4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.248438] env[62914]: DEBUG oslo_vmware.api [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831910, 'name': ReconfigVM_Task, 'duration_secs': 0.828701} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.249019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.249266] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfigured VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 830.275014] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831912, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.278451] env[62914]: DEBUG oslo_vmware.api [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4831911, 'name': RemoveSnapshot_Task, 'duration_secs': 0.896415} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.279177] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 830.279177] env[62914]: INFO nova.compute.manager [None req-89c1dc68-3f03-4c7b-99f8-6dfea3cac192 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Took 16.56 seconds to snapshot the instance on the hypervisor. [ 830.391707] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 830.415726] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.416042] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.416222] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.416416] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.416570] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.416722] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 830.416937] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.417114] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 830.417278] env[62914]: DEBUG nova.virt.hardware [None 
req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.417445] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.417623] env[62914]: DEBUG nova.virt.hardware [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.418621] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e9f87d-f730-45b7-8389-9e3980787957 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.426956] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b656dfa-efd4-4b06-93fc-2f3f378aeeb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.518048] env[62914]: DEBUG oslo_concurrency.lockutils [req-b1027c51-5bde-4327-9905-e0b620823c31 req-c30f9898-e81d-4d9c-9bc9-4102b6ad8603 service nova] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.544369] env[62914]: DEBUG nova.scheduler.client.report [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.574430] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "refresh_cache-10102941-c31a-4ab1-be5a-801520d49fd7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.574805] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance network_info: |[{"id": "4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2", "address": "fa:16:3e:71:a1:52", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a22be80-3c", "ovs_interfaceid": "4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 830.575286] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:a1:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.583413] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating folder: Project (5adc4dc554ed4fe69f214161fd8ab9b9). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 830.583925] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-203a3c7a-50d1-4535-a522-692fd58f94a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.597165] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created folder: Project (5adc4dc554ed4fe69f214161fd8ab9b9) in parent group-v941773. [ 830.597397] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating folder: Instances. Parent ref: group-v941945. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 830.597675] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fabdeb8f-c716-4d26-bf46-19bf209f29b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.609992] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created folder: Instances in parent group-v941945. 
[ 830.610316] env[62914]: DEBUG oslo.service.loopingcall [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.610554] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 830.610839] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49bd49c4-e0a5-4030-8419-05a30520ae38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.632334] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.632334] env[62914]: value = "task-4831915" [ 830.632334] env[62914]: _type = "Task" [ 830.632334] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.641684] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831915, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.755068] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c16e977e-d44f-4b5d-bcd9-10372fd93939 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-0acbfa32-adde-4a6e-bfb6-c745a0b8524a" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.012s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.772814] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831912, 'name': Rename_Task, 'duration_secs': 0.595269} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.772814] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 830.772920] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ff8f66a-8584-41dc-a8b0-28c1849e2f03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.788781] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 830.788781] env[62914]: value = "task-4831916" [ 830.788781] env[62914]: _type = "Task" [ 830.788781] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.800730] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.805357] env[62914]: DEBUG nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 830.805719] env[62914]: DEBUG nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing instance network info cache due to event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 830.806100] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.806346] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.806573] env[62914]: DEBUG nova.network.neutron [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 831.049451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.050073] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 831.053477] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.711s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.054711] env[62914]: INFO nova.compute.claims [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.147196] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831915, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.260308] env[62914]: DEBUG nova.network.neutron [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Successfully updated port: b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.302127] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831916, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.555053] env[62914]: DEBUG nova.compute.utils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 831.556563] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 831.556766] env[62914]: DEBUG nova.network.neutron [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 831.586703] env[62914]: DEBUG nova.network.neutron [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updated VIF entry in instance network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 831.587218] env[62914]: DEBUG nova.network.neutron [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.644720] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831915, 'name': CreateVM_Task, 'duration_secs': 0.629235} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.644907] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 831.645722] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.645919] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.646692] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 831.647864] env[62914]: DEBUG nova.policy [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e291489da35649d0a2c69f98714d89ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14ea39ac6e2d400ca89bbffc20d764ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.649577] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acab0415-d6ad-4bf8-9d52-201bbeb0c316 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.654486] env[62914]: DEBUG nova.compute.manager [req-fbe1e0c3-3827-4091-8ac1-210fe791ed95 req-6ae96017-6832-42e6-808f-5b4e7dd56d75 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Received event network-vif-plugged-b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 831.654752] env[62914]: DEBUG oslo_concurrency.lockutils [req-fbe1e0c3-3827-4091-8ac1-210fe791ed95 req-6ae96017-6832-42e6-808f-5b4e7dd56d75 service nova] Acquiring lock "4911baea-15df-46db-be11-fcf998eb0cb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.655135] env[62914]: DEBUG oslo_concurrency.lockutils [req-fbe1e0c3-3827-4091-8ac1-210fe791ed95 req-6ae96017-6832-42e6-808f-5b4e7dd56d75 service nova] Lock "4911baea-15df-46db-be11-fcf998eb0cb6-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.655135] env[62914]: DEBUG oslo_concurrency.lockutils [req-fbe1e0c3-3827-4091-8ac1-210fe791ed95 req-6ae96017-6832-42e6-808f-5b4e7dd56d75 service nova] Lock "4911baea-15df-46db-be11-fcf998eb0cb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.655334] env[62914]: DEBUG nova.compute.manager [req-fbe1e0c3-3827-4091-8ac1-210fe791ed95 req-6ae96017-6832-42e6-808f-5b4e7dd56d75 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] No waiting events found dispatching network-vif-plugged-b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 831.655473] env[62914]: WARNING nova.compute.manager [req-fbe1e0c3-3827-4091-8ac1-210fe791ed95 req-6ae96017-6832-42e6-808f-5b4e7dd56d75 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Received unexpected event network-vif-plugged-b8eb6717-ecdc-4bbe-ad47-b975cf486bfa for instance with vm_state building and task_state spawning. [ 831.657973] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 831.657973] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52791df5-8fbb-a428-b511-3aea29e25f14" [ 831.657973] env[62914]: _type = "Task" [ 831.657973] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.667356] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52791df5-8fbb-a428-b511-3aea29e25f14, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.765233] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "refresh_cache-4911baea-15df-46db-be11-fcf998eb0cb6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.765388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "refresh_cache-4911baea-15df-46db-be11-fcf998eb0cb6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.765665] env[62914]: DEBUG nova.network.neutron [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 831.801199] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831916, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.934230] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.934503] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.061498] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 832.091860] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.092200] env[62914]: DEBUG nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Received event network-vif-plugged-4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 832.092409] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Acquiring lock "10102941-c31a-4ab1-be5a-801520d49fd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.092740] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Lock "10102941-c31a-4ab1-be5a-801520d49fd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.092921] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Lock "10102941-c31a-4ab1-be5a-801520d49fd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.093111] env[62914]: DEBUG nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] No waiting events found dispatching network-vif-plugged-4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 832.093348] env[62914]: WARNING nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Received unexpected event network-vif-plugged-4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 for instance with vm_state building and task_state spawning. [ 832.093588] env[62914]: DEBUG nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Received event network-changed-4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 832.093768] env[62914]: DEBUG nova.compute.manager [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Refreshing instance network info cache due to event network-changed-4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 832.093967] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Acquiring lock "refresh_cache-10102941-c31a-4ab1-be5a-801520d49fd7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.094157] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Acquired lock "refresh_cache-10102941-c31a-4ab1-be5a-801520d49fd7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.094295] env[62914]: DEBUG nova.network.neutron [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Refreshing network info cache for port 4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 832.170257] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52791df5-8fbb-a428-b511-3aea29e25f14, 'name': SearchDatastore_Task, 'duration_secs': 0.034161} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.172684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.172941] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.173210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.173375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.173701] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.174202] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-420aea5c-547f-4a3a-8cea-1e9e2e24fa85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.189257] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.189472] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 832.190295] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6206bd65-eb90-459d-b9f5-5bed8b6341d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.196576] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 832.196576] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5217eb59-dd34-1453-39c7-84764a2bda6a" [ 832.196576] env[62914]: _type = "Task" [ 832.196576] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.207974] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5217eb59-dd34-1453-39c7-84764a2bda6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.305443] env[62914]: DEBUG oslo_vmware.api [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831916, 'name': PowerOnVM_Task, 'duration_secs': 1.219377} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.306421] env[62914]: DEBUG nova.network.neutron [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.308584] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 832.308838] env[62914]: INFO nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Took 12.26 seconds to spawn the instance on the hypervisor. [ 832.309529] env[62914]: DEBUG nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 832.312963] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef06165-be8e-464b-8855-4626e60906db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.387703] env[62914]: DEBUG nova.network.neutron [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Successfully created port: e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.514509] env[62914]: DEBUG nova.network.neutron [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Updating instance_info_cache with network_info: [{"id": "b8eb6717-ecdc-4bbe-ad47-b975cf486bfa", "address": "fa:16:3e:e5:f5:21", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8eb6717-ec", "ovs_interfaceid": "b8eb6717-ecdc-4bbe-ad47-b975cf486bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.714556] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': 
session[52d52040-ced2-7a98-19c6-f97f142d02ee]5217eb59-dd34-1453-39c7-84764a2bda6a, 'name': SearchDatastore_Task, 'duration_secs': 0.055941} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.715207] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c193a4b-bbb3-423d-92a5-685506e05060 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.724742] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 832.724742] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c951ed-9baa-624d-8ca5-39bce43a8d30" [ 832.724742] env[62914]: _type = "Task" [ 832.724742] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.736343] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c951ed-9baa-624d-8ca5-39bce43a8d30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.767903] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10919851-6c5c-4d0c-ba13-e6b0e9e245db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.776334] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baf3946-355a-4209-ac2c-843216b5b635 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.812459] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca8fb08-af70-4700-89d4-acb5adaaaeb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.821182] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ca6b44-4bc7-4e91-9341-4ded6924b18e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.839807] env[62914]: DEBUG nova.compute.provider_tree [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.848780] env[62914]: INFO nova.compute.manager [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Took 63.59 seconds to build instance. 
[ 832.907843] env[62914]: DEBUG nova.network.neutron [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Updated VIF entry in instance network info cache for port 4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 832.908254] env[62914]: DEBUG nova.network.neutron [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Updating instance_info_cache with network_info: [{"id": "4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2", "address": "fa:16:3e:71:a1:52", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a22be80-3c", "ovs_interfaceid": "4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.019573] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "refresh_cache-4911baea-15df-46db-be11-fcf998eb0cb6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.019573] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Instance network_info: |[{"id": "b8eb6717-ecdc-4bbe-ad47-b975cf486bfa", "address": "fa:16:3e:e5:f5:21", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8eb6717-ec", "ovs_interfaceid": 
"b8eb6717-ecdc-4bbe-ad47-b975cf486bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 833.019573] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:f5:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8eb6717-ecdc-4bbe-ad47-b975cf486bfa', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.028008] env[62914]: DEBUG oslo.service.loopingcall [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.028314] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 833.028547] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0707a87-0e5f-4522-98c5-1875eaa9eb0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.051374] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.051374] env[62914]: value = "task-4831917" [ 833.051374] env[62914]: _type = "Task" [ 833.051374] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.062389] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831917, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.070739] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 833.100794] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.101063] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.101342] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.101552] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.101704] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.101853] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.102296] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.102506] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 833.102680] env[62914]: DEBUG 
nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.102846] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.103035] env[62914]: DEBUG nova.virt.hardware [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.103882] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9ba5dd-3882-4af8-baa1-9c72d537cc06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.113166] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2841a44c-bc0e-4502-a50d-f3382a5b0406 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.195238] env[62914]: DEBUG nova.compute.manager [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Received event network-changed-b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 833.195238] env[62914]: DEBUG nova.compute.manager [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Refreshing instance network info cache due to event network-changed-b8eb6717-ecdc-4bbe-ad47-b975cf486bfa. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 833.195442] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] Acquiring lock "refresh_cache-4911baea-15df-46db-be11-fcf998eb0cb6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.195673] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] Acquired lock "refresh_cache-4911baea-15df-46db-be11-fcf998eb0cb6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.195861] env[62914]: DEBUG nova.network.neutron [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Refreshing network info cache for port b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 833.235374] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c951ed-9baa-624d-8ca5-39bce43a8d30, 'name': SearchDatastore_Task, 'duration_secs': 0.041507} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.235674] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.235950] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 833.236248] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66207b6c-32cc-4871-824b-b4dbb55d1c3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.245362] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 833.245362] env[62914]: value = "task-4831918" [ 833.245362] env[62914]: _type = "Task" [ 833.245362] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.256117] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831918, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.348528] env[62914]: DEBUG nova.scheduler.client.report [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 833.352304] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e12bfa8-f465-4642-a0c5-24257aa43993 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.716s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.414608] env[62914]: DEBUG oslo_concurrency.lockutils [req-8655da6d-7d7f-4a3e-81a9-99412a5beb29 req-d45ebeda-656c-4704-a517-a2901890ba57 service nova] Releasing lock "refresh_cache-10102941-c31a-4ab1-be5a-801520d49fd7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.563307] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831917, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.757607] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831918, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.857920] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.805s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.859190] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 833.861797] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 833.866707] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.690s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.867930] env[62914]: INFO nova.compute.claims [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.992386] env[62914]: DEBUG nova.network.neutron [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Updated VIF entry in instance network info cache for port b8eb6717-ecdc-4bbe-ad47-b975cf486bfa. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 833.992866] env[62914]: DEBUG nova.network.neutron [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Updating instance_info_cache with network_info: [{"id": "b8eb6717-ecdc-4bbe-ad47-b975cf486bfa", "address": "fa:16:3e:e5:f5:21", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8eb6717-ec", "ovs_interfaceid": "b8eb6717-ecdc-4bbe-ad47-b975cf486bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.063120] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831917, 'name': CreateVM_Task, 'duration_secs': 0.729215} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.063311] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 834.064994] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.064994] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.064994] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 834.064994] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e86a7bf-2df1-46d1-a8f2-cc541204dc1e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.071025] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 834.071025] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52894062-ddd2-ba94-50ae-a0b0dbed1f1a" [ 834.071025] env[62914]: _type = "Task" [ 834.071025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.079666] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52894062-ddd2-ba94-50ae-a0b0dbed1f1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.168666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.168943] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.169186] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.169460] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.169550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.172057] env[62914]: INFO nova.compute.manager [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Terminating instance [ 834.174302] env[62914]: DEBUG nova.compute.manager [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 834.174585] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 834.175379] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cf6a62-c28d-40f3-ac31-877be53cf479 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.184789] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 834.186405] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4ce502a-7e7c-4f69-96d2-3976765d0500 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.194255] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 834.194255] env[62914]: value = "task-4831919" [ 834.194255] env[62914]: _type = "Task" [ 834.194255] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.204184] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.258319] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831918, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.815114} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.258319] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 834.258572] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.258755] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1d71574-34c5-426f-95a2-4c6d954bc27d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.266580] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 834.266580] env[62914]: value = "task-4831920" [ 834.266580] env[62914]: _type = "Task" [ 834.266580] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.277450] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831920, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.356485] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-55d78b2e-b665-4a1c-84fe-47e02f937395" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.356880] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-55d78b2e-b665-4a1c-84fe-47e02f937395" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.369366] env[62914]: DEBUG nova.compute.utils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.373070] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 834.373242] env[62914]: DEBUG nova.network.neutron [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 834.394467] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.424297] env[62914]: DEBUG nova.policy [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '021e6445fa3144688f871a39e8317de0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23ba9ece80a24353ac072b643cb16df7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 834.495345] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6d07b59-5ee1-4418-ae57-eeb5722e328f req-a04b5fc1-55e5-4d5b-801a-c8ef9ff40cf4 service nova] Releasing lock 
"refresh_cache-4911baea-15df-46db-be11-fcf998eb0cb6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.507844] env[62914]: DEBUG nova.compute.manager [req-722fa040-2585-4b3c-a86c-3247e944a76f req-df4f173f-09e3-4da0-9a56-2e7468794953 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Received event network-vif-plugged-e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 834.507844] env[62914]: DEBUG oslo_concurrency.lockutils [req-722fa040-2585-4b3c-a86c-3247e944a76f req-df4f173f-09e3-4da0-9a56-2e7468794953 service nova] Acquiring lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.507844] env[62914]: DEBUG oslo_concurrency.lockutils [req-722fa040-2585-4b3c-a86c-3247e944a76f req-df4f173f-09e3-4da0-9a56-2e7468794953 service nova] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.507844] env[62914]: DEBUG oslo_concurrency.lockutils [req-722fa040-2585-4b3c-a86c-3247e944a76f req-df4f173f-09e3-4da0-9a56-2e7468794953 service nova] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.508043] env[62914]: DEBUG nova.compute.manager [req-722fa040-2585-4b3c-a86c-3247e944a76f req-df4f173f-09e3-4da0-9a56-2e7468794953 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] No waiting events found dispatching network-vif-plugged-e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 834.508232] env[62914]: WARNING nova.compute.manager [req-722fa040-2585-4b3c-a86c-3247e944a76f req-df4f173f-09e3-4da0-9a56-2e7468794953 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Received unexpected event network-vif-plugged-e7fd222f-0127-4616-8d16-801a8c35a0d1 for instance with vm_state building and task_state spawning. [ 834.584742] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52894062-ddd2-ba94-50ae-a0b0dbed1f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.022443} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.585807] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.585807] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.585807] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.585807] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.586032] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.586308] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-546dbc67-3119-455c-8165-384f713ebc2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.599768] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.600028] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Folder [datastore1] devstack-image-cache_base created. 
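The sequence above is the image-cache path: lock the cached VMDK, ensure the devstack-image-cache_base directory exists, then copy the cached disk into the instance folder ([datastore1] <uuid>/<uuid>.vmdk). A simplified local-filesystem analogue of that flow, for illustration only; the paths are placeholders, download_image is an assumed helper, and the real code drives vCenter datastore tasks rather than shutil.

    import os
    import shutil

    from oslo_concurrency import lockutils

    CACHE_DIR = '/tmp/devstack-image-cache_base'   # placeholder for the datastore cache folder

    def download_image(image_id, path):
        # Placeholder for the Glance download step; writes an empty file here.
        open(path, 'wb').close()

    def fetch_image_if_missing(image_id, instance_id, instance_root='/tmp/instances'):
        """Populate the image cache if needed, then copy the disk to the instance folder."""
        cached = os.path.join(CACHE_DIR, image_id, image_id + '.vmdk')
        # Serialize on the cached VMDK, mirroring the per-file lock in the log.
        with lockutils.lock(cached):
            os.makedirs(os.path.dirname(cached), exist_ok=True)   # "Creating directory ..."
            if not os.path.exists(cached):
                download_image(image_id, cached)
        instance_dir = os.path.join(instance_root, instance_id)
        os.makedirs(instance_dir, exist_ok=True)
        dest = os.path.join(instance_dir, instance_id + '.vmdk')
        shutil.copyfile(cached, dest)                             # "Copying Virtual Disk ..."
        return dest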
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 834.600794] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dde2f29-ecc9-4e0e-ac67-2d44a488324a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.607582] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 834.607582] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c7eb3b-39e7-6de4-2136-d3502d722227" [ 834.607582] env[62914]: _type = "Task" [ 834.607582] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.618794] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c7eb3b-39e7-6de4-2136-d3502d722227, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.708105] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831919, 'name': PowerOffVM_Task, 'duration_secs': 0.245255} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.708452] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 834.708718] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 834.709050] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-616de489-a8fe-4bea-aead-0149a3a70345 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.775019] env[62914]: DEBUG nova.network.neutron [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Successfully created port: 705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.787900] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831920, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138916} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.787900] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.788915] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71a75dd-fa02-4a17-813a-e5357e5cc5ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.818446] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.818829] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b6cdaf7-d5ba-4a2f-9c75-4154bafd0200 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.841071] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 834.841071] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 834.841071] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Deleting the datastore file [datastore1] 1ec89a28-d4f3-4324-bf14-c99c5ce05950 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.841071] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8f2b72d-666b-4d36-9e42-3fcb01513272 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.846563] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for the task: (returnval){ [ 834.846563] env[62914]: value = "task-4831922" [ 834.846563] env[62914]: _type = "Task" [ 834.846563] env[62914]: } to complete. 
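The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above spell out the teardown order: stop the VM, drop it from the vCenter inventory, then remove its files from the datastore. A self-contained sketch of that ordering with a stand-in VM object (not the nova vmops implementation):

    from dataclasses import dataclass

    @dataclass
    class FakeVM:
        """Stand-in for a managed VM; every method is a placeholder."""
        name: str
        powered_on: bool = True
        registered: bool = True
        files_present: bool = True

        def power_off(self):      # analogous to PowerOffVM_Task
            self.powered_on = False

        def unregister(self):     # analogous to UnregisterVM (inventory only, files remain)
            self.registered = False

        def delete_files(self):   # analogous to DeleteDatastoreFile_Task
            self.files_present = False

    def destroy_instance(vm: FakeVM) -> None:
        # Order matters: files are removed only once the VM is stopped and
        # out of the inventory, matching the sequence in the log.
        if vm.powered_on:
            vm.power_off()
        vm.unregister()
        vm.delete_files()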
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.848475] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 834.848475] env[62914]: value = "task-4831923" [ 834.848475] env[62914]: _type = "Task" [ 834.848475] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.863342] env[62914]: DEBUG nova.network.neutron [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Successfully updated port: e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.863342] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.863342] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.873064] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5f4591-a33e-4113-a60d-67ec4ffa9be9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.875844] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.876127] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831923, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.878629] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 834.903252] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8151ac2b-e671-42b1-b43f-a89617d9dcd4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.934265] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfiguring VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 834.937975] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8f36441-fd10-4a3e-9d72-2dac21eb6355 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.960830] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 834.960830] env[62914]: value = "task-4831924" [ 834.960830] env[62914]: _type = "Task" [ 834.960830] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.971242] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.124525] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c7eb3b-39e7-6de4-2136-d3502d722227, 'name': SearchDatastore_Task, 'duration_secs': 0.025457} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.124525] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be0413d3-1b4b-41f6-9394-d7dbb2fc6d61 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.131031] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 835.131031] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c9982f-3cc7-e63a-0ab5-75a35b9a7a71" [ 835.131031] env[62914]: _type = "Task" [ 835.131031] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.138740] env[62914]: DEBUG nova.compute.manager [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Received event network-changed-e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 835.138941] env[62914]: DEBUG nova.compute.manager [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Refreshing instance network info cache due to event network-changed-e7fd222f-0127-4616-8d16-801a8c35a0d1. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 835.139263] env[62914]: DEBUG oslo_concurrency.lockutils [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] Acquiring lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.139394] env[62914]: DEBUG oslo_concurrency.lockutils [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] Acquired lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.139533] env[62914]: DEBUG nova.network.neutron [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Refreshing network info cache for port e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 835.146906] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c9982f-3cc7-e63a-0ab5-75a35b9a7a71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.359188] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.369199] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831923, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.379752] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.470952] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.497370] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66c4b04-c352-4736-9899-5e8fb82fef04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.506135] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9304b5c-c903-4eac-bb0b-6378966acbd3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.538013] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11973308-6cb6-4410-9d4d-750363a301dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.547150] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad86a7d0-9282-472d-89b3-10a7680a1c85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.562153] env[62914]: DEBUG nova.compute.provider_tree [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.644153] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c9982f-3cc7-e63a-0ab5-75a35b9a7a71, 'name': SearchDatastore_Task, 'duration_secs': 0.044951} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.644642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.644696] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 4911baea-15df-46db-be11-fcf998eb0cb6/4911baea-15df-46db-be11-fcf998eb0cb6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 835.645727] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b263135-e9b9-4f6a-bcc7-50ac181e6fa3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.654714] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 835.654714] env[62914]: value = "task-4831925" [ 835.654714] env[62914]: _type = "Task" [ 835.654714] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.663693] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.715308] env[62914]: DEBUG nova.network.neutron [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.853888] env[62914]: DEBUG nova.network.neutron [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.866419] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831923, 'name': ReconfigVM_Task, 'duration_secs': 0.584668} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.871012] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.871894] env[62914]: DEBUG oslo_vmware.api [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Task: {'id': task-4831922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.519525} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.872180] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5a67192-13c1-463a-a7d4-44f58eca0639 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.874639] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.874853] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 835.875051] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 835.875235] env[62914]: INFO nova.compute.manager [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Took 1.70 seconds to destroy the instance on the hypervisor. [ 835.876559] env[62914]: DEBUG oslo.service.loopingcall [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.876559] env[62914]: DEBUG nova.compute.manager [-] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 835.876559] env[62914]: DEBUG nova.network.neutron [-] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.887132] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 835.887132] env[62914]: value = "task-4831926" [ 835.887132] env[62914]: _type = "Task" [ 835.887132] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.894634] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 835.900925] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831926, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.930369] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.930680] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.930883] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.931212] env[62914]: DEBUG nova.virt.hardware [None 
req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.931306] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.931460] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.931699] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.931907] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.932123] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.932538] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.932787] env[62914]: DEBUG nova.virt.hardware [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.933736] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45015199-1810-44ac-ac68-f87c18af194b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.943263] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c7a328-d426-4025-adfb-8c00e8229567 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.971112] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': 
task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.066051] env[62914]: DEBUG nova.scheduler.client.report [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.168245] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831925, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.361843] env[62914]: DEBUG oslo_concurrency.lockutils [req-3a6679de-247a-45c6-b4d4-cdaee574e7b7 req-9d97ad6d-11d0-4de5-8d7f-ae917a30f7aa service nova] Releasing lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.361843] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.362243] env[62914]: DEBUG nova.network.neutron [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 836.398923] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831926, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.472104] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. 
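The inventory payload above carries the fields Placement sizes a resource class with: total, reserved and allocation_ratio, where usable capacity is (total - reserved) * allocation_ratio. With the values from this line, VCPU advertises 192 schedulable units. A quick check of that arithmetic, with the numbers copied from the log:

    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(record):
        """Schedulable capacity for one resource class: (total - reserved) * allocation_ratio."""
        return (record['total'] - record['reserved']) * record['allocation_ratio']

    for rc, record in INVENTORY.items():
        print(rc, capacity(record))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0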
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.473139] env[62914]: DEBUG nova.network.neutron [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Successfully updated port: 705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 836.487032] env[62914]: DEBUG nova.compute.manager [req-4a2e06de-3651-4db9-84a3-85d2f15d2170 req-f3ee2da6-cc32-49ef-bf2d-06af39bb8008 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Received event network-vif-plugged-705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 836.487142] env[62914]: DEBUG oslo_concurrency.lockutils [req-4a2e06de-3651-4db9-84a3-85d2f15d2170 req-f3ee2da6-cc32-49ef-bf2d-06af39bb8008 service nova] Acquiring lock "397c5401-a435-4170-b07d-a03488c73867-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.487343] env[62914]: DEBUG oslo_concurrency.lockutils [req-4a2e06de-3651-4db9-84a3-85d2f15d2170 req-f3ee2da6-cc32-49ef-bf2d-06af39bb8008 service nova] Lock "397c5401-a435-4170-b07d-a03488c73867-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.487483] env[62914]: DEBUG oslo_concurrency.lockutils [req-4a2e06de-3651-4db9-84a3-85d2f15d2170 req-f3ee2da6-cc32-49ef-bf2d-06af39bb8008 service nova] Lock "397c5401-a435-4170-b07d-a03488c73867-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.488378] env[62914]: DEBUG nova.compute.manager [req-4a2e06de-3651-4db9-84a3-85d2f15d2170 req-f3ee2da6-cc32-49ef-bf2d-06af39bb8008 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] No waiting events found dispatching network-vif-plugged-705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 836.488378] env[62914]: WARNING nova.compute.manager [req-4a2e06de-3651-4db9-84a3-85d2f15d2170 req-f3ee2da6-cc32-49ef-bf2d-06af39bb8008 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Received unexpected event network-vif-plugged-705a7360-47b1-4951-92f7-277ca049efa1 for instance with vm_state building and task_state spawning. [ 836.572779] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.572779] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Start building networks asynchronously for instance. 
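The network-vif-plugged traffic above follows a register-then-dispatch pattern: the spawning thread registers the event it expects, and an incoming notification either pops and signals that waiter or, if nobody registered yet, is logged as unexpected (the WARNING entries). A simplified model of that pattern using threading.Event; this is an illustration, not nova's InstanceEvents class.

    import threading

    class InstanceEvents:
        """Simplified wait/dispatch registry (placeholder, not the nova implementation)."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_id, event_name) -> threading.Event

        def prepare_for_event(self, instance_id, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_id, event_name)] = ev
            return ev            # caller blocks on ev.wait(timeout=...) during spawn

        def pop_instance_event(self, instance_id, event_name):
            with self._lock:
                return self._waiters.pop((instance_id, event_name), None)

    def external_instance_event(events, instance_id, event_name):
        ev = events.pop_instance_event(instance_id, event_name)
        if ev is None:
            # Mirrors the WARNING: the event arrived before anyone registered for it.
            print('Received unexpected event %s for instance %s' % (event_name, instance_id))
        else:
            ev.set()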
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 836.575030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 32.946s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.666707] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79818} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.666922] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 4911baea-15df-46db-be11-fcf998eb0cb6/4911baea-15df-46db-be11-fcf998eb0cb6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 836.667159] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.667690] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99635308-6972-4655-8ca7-dbf325b5da18 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.674794] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 836.674794] env[62914]: value = "task-4831927" [ 836.674794] env[62914]: _type = "Task" [ 836.674794] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.684924] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.883955] env[62914]: DEBUG nova.network.neutron [-] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.898952] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831926, 'name': Rename_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.900880] env[62914]: DEBUG nova.network.neutron [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 836.973598] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.975285] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.975385] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.975535] env[62914]: DEBUG nova.network.neutron [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.066698] env[62914]: DEBUG nova.network.neutron [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [{"id": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "address": "fa:16:3e:72:0c:2d", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7fd222f-01", "ovs_interfaceid": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 837.078861] env[62914]: DEBUG nova.compute.utils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.101707] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 837.101707] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 837.142976] env[62914]: DEBUG nova.policy [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3757ee859d1a4cebbcc504c8c92f6489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1873cee9895d48cb97914fd7ca8392a0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 837.189160] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109136} completed successfully. 
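The instance_info_cache update above stores network_info as a list of VIF dicts, with fixed IPs nested under network.subnets[].ips[]. A small helper that walks such a structure; the inlined VIF below is trimmed from the entry above:

    NETWORK_INFO = [{
        'id': 'e7fd222f-0127-4616-8d16-801a8c35a0d1',
        'address': 'fa:16:3e:72:0c:2d',
        'network': {
            'id': '42dfbd96-0a8c-4737-b219-2a891ed87086',
            'subnets': [{
                'cidr': '192.168.128.0/28',
                'ips': [{'address': '192.168.128.14', 'type': 'fixed', 'version': 4}],
            }],
        },
        'type': 'ovs',
    }]

    def fixed_ips(network_info):
        """Yield (mac, ip) pairs for every fixed IP in a network_info list."""
        for vif in network_info:
            for subnet in vif.get('network', {}).get('subnets', []):
                for ip in subnet.get('ips', []):
                    if ip.get('type') == 'fixed':
                        yield vif.get('address'), ip['address']

    print(list(fixed_ips(NETWORK_INFO)))   # [('fa:16:3e:72:0c:2d', '192.168.128.14')]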
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.189160] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.189919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4993720e-4983-48a1-950f-086630bd0194 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.219684] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 4911baea-15df-46db-be11-fcf998eb0cb6/4911baea-15df-46db-be11-fcf998eb0cb6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.220038] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53254e65-0aac-44a3-8d80-0c66d1472440 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.238053] env[62914]: DEBUG nova.compute.manager [req-b0948bc2-9a2f-4689-8f5f-fca3b9ebd0a0 req-270dfcb4-bd5f-4777-b46f-c5d583a0af4c service nova] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Received event network-vif-deleted-fc0ce838-2709-475a-8c53-7dce16800b0d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 837.245161] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 837.245161] env[62914]: value = "task-4831928" [ 837.245161] env[62914]: _type = "Task" [ 837.245161] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.258655] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831928, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.286616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.286956] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.287273] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.287518] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.287725] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.290617] env[62914]: INFO nova.compute.manager [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Terminating instance [ 837.292922] env[62914]: DEBUG nova.compute.manager [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 837.293172] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 837.294035] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da684b2-8caf-46b3-9f9f-ccfa74093750 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.302956] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 837.303323] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-923ceb50-581a-4653-8a17-b3093b3c6182 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.311311] env[62914]: DEBUG oslo_vmware.api [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 837.311311] env[62914]: value = "task-4831929" [ 837.311311] env[62914]: _type = "Task" [ 837.311311] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.320763] env[62914]: DEBUG oslo_vmware.api [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831929, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.387046] env[62914]: INFO nova.compute.manager [-] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Took 1.51 seconds to deallocate network for instance. [ 837.401496] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831926, 'name': Rename_Task, 'duration_secs': 1.289974} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.401866] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 837.402174] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22bc9ab9-1b9c-4b89-9e99-f99f22e9eaf6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.410538] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 837.410538] env[62914]: value = "task-4831930" [ 837.410538] env[62914]: _type = "Task" [ 837.410538] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.425763] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.435866] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Successfully created port: 5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.472690] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.530028] env[62914]: DEBUG nova.network.neutron [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.569678] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.570043] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Instance network_info: |[{"id": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "address": "fa:16:3e:72:0c:2d", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7fd222f-01", "ovs_interfaceid": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 837.570591] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:0c:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7fd222f-0127-4616-8d16-801a8c35a0d1', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.582140] env[62914]: DEBUG oslo.service.loopingcall [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.582372] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 837.582629] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2731e051-a9da-424f-9a18-e057468a2fdb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.603181] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Applying migration context for instance 4cea2bd1-a238-4fb6-bc47-719894461228 as it has an incoming, in-progress migration 8e4a4d60-6c1d-42ca-b081-c15b4d2a896c. Migration status is reverting {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 837.603464] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Applying migration context for instance 2f7bc586-af68-4d9d-81e2-8247371dfa7f as it has an incoming, in-progress migration 5affa2b3-57aa-4caf-b07c-b4616c9bb3c4. Migration status is confirming {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 837.605872] env[62914]: INFO nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating resource usage from migration 8e4a4d60-6c1d-42ca-b081-c15b4d2a896c [ 837.605872] env[62914]: INFO nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating resource usage from migration 5affa2b3-57aa-4caf-b07c-b4616c9bb3c4 [ 837.608319] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 837.617204] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.617204] env[62914]: value = "task-4831931" [ 837.617204] env[62914]: _type = "Task" [ 837.617204] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.626749] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831931, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.645600] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance aede8da7-8bf2-4963-b08b-6e06007614a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.645600] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 6bdcd778-0942-41e7-a6fb-7c3413d34ef7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 837.645738] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 76dfbf82-0ed0-4621-890c-060b187b47e0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 837.646435] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 29a177e4-b5d7-4249-8fc5-2316f6891536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646435] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7d8287f9-10be-4834-8b7a-1b764145d1c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646435] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 2d48056c-d38f-4be1-b28b-71da14607870 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646435] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 1fb67ac1-c0b7-48b9-8562-d457d46709bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646435] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e061304c-998b-4331-b60d-809916844a6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646435] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 1d74504f-b641-42c6-a420-c80614d69b23 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 837.646726] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance b477cd62-49c2-4e3c-98ea-b4154dda4986 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646726] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance baf28ebf-3ab8-465c-a13b-705ccf3510dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646898] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance b77a3d27-fe9f-49fc-95d1-15fe82762833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.646898] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Migration 8e4a4d60-6c1d-42ca-b081-c15b4d2a896c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 837.647032] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4cea2bd1-a238-4fb6-bc47-719894461228 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.648204] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 837.648438] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance dc99b470-4334-408d-8853-d2e9b9204d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.648604] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Migration 5affa2b3-57aa-4caf-b07c-b4616c9bb3c4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 837.648749] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 2f7bc586-af68-4d9d-81e2-8247371dfa7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.648889] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 1ec89a28-d4f3-4324-bf14-c99c5ce05950 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.649039] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 10102941-c31a-4ab1-be5a-801520d49fd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.649202] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4911baea-15df-46db-be11-fcf998eb0cb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.650020] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 557c0538-fc4a-403a-a9cb-b706e2260b1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.650020] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 397c5401-a435-4170-b07d-a03488c73867 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.650020] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bd81fcb7-abef-4b86-8dce-f07b1c226f2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 837.758561] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831928, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.803073] env[62914]: DEBUG nova.network.neutron [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [{"id": "705a7360-47b1-4951-92f7-277ca049efa1", "address": "fa:16:3e:be:78:78", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705a7360-47", "ovs_interfaceid": "705a7360-47b1-4951-92f7-277ca049efa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.822129] env[62914]: DEBUG oslo_vmware.api [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831929, 'name': PowerOffVM_Task, 'duration_secs': 0.22222} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.826426] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 837.826426] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 837.826426] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-311e2099-2837-4f50-be8c-7ecb24192d90 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.897681] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 837.897917] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 837.898271] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleting the datastore file [datastore2] baf28ebf-3ab8-465c-a13b-705ccf3510dc {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.898592] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc52259-af86-424e-a2d8-a0046371f90c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.903635] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.906012] env[62914]: DEBUG oslo_vmware.api [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 837.906012] env[62914]: value = "task-4831933" [ 837.906012] env[62914]: _type = "Task" [ 837.906012] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.917421] env[62914]: DEBUG oslo_vmware.api [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.922756] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831930, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.976163] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.128163] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831931, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.155968] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 32e8f18e-2116-43bd-9951-ad809ab95ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 838.258716] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831928, 'name': ReconfigVM_Task, 'duration_secs': 0.650361} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.259020] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 4911baea-15df-46db-be11-fcf998eb0cb6/4911baea-15df-46db-be11-fcf998eb0cb6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.259659] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9bb40b2-1c46-47a0-9be0-607e8ee47b95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.266705] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 838.266705] env[62914]: value = "task-4831934" [ 838.266705] env[62914]: _type = "Task" [ 838.266705] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.276284] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831934, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.306760] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.307183] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance network_info: |[{"id": "705a7360-47b1-4951-92f7-277ca049efa1", "address": "fa:16:3e:be:78:78", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705a7360-47", "ovs_interfaceid": "705a7360-47b1-4951-92f7-277ca049efa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 838.307679] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:78:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '705a7360-47b1-4951-92f7-277ca049efa1', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.316879] env[62914]: DEBUG oslo.service.loopingcall [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.317187] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 838.317431] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c8cbb00-6ec1-4e4a-b617-a42139d61531 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.339200] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.339200] env[62914]: value = "task-4831935" [ 838.339200] env[62914]: _type = "Task" [ 838.339200] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.348631] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831935, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.418135] env[62914]: DEBUG oslo_vmware.api [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4831933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150587} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.418828] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.419149] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 838.419355] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 838.419535] env[62914]: INFO nova.compute.manager [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 838.419804] env[62914]: DEBUG oslo.service.loopingcall [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.420017] env[62914]: DEBUG nova.compute.manager [-] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 838.420115] env[62914]: DEBUG nova.network.neutron [-] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.425489] env[62914]: DEBUG oslo_vmware.api [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831930, 'name': PowerOnVM_Task, 'duration_secs': 0.826474} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.426167] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 838.426394] env[62914]: INFO nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Took 10.80 seconds to spawn the instance on the hypervisor. [ 838.426553] env[62914]: DEBUG nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 838.427411] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66321e45-a5f0-49cf-8f1a-79b113f2d085 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.474199] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.515301] env[62914]: DEBUG nova.compute.manager [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Received event network-changed-705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 838.515301] env[62914]: DEBUG nova.compute.manager [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Refreshing instance network info cache due to event network-changed-705a7360-47b1-4951-92f7-277ca049efa1. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 838.515301] env[62914]: DEBUG oslo_concurrency.lockutils [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] Acquiring lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.515301] env[62914]: DEBUG oslo_concurrency.lockutils [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] Acquired lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.515711] env[62914]: DEBUG nova.network.neutron [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Refreshing network info cache for port 705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 838.619369] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 838.635630] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831931, 'name': CreateVM_Task, 'duration_secs': 0.562294} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.635951] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 838.637263] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.637594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.638033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 838.638704] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92877731-6d60-4863-80fc-82b59bd925b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.647158] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a 
tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 838.647158] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5262d0b7-7ba6-967d-8510-a185f353a828" [ 838.647158] env[62914]: _type = "Task" [ 838.647158] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.656734] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5262d0b7-7ba6-967d-8510-a185f353a828, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.658939] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 838.659236] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 838.659434] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.659642] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 838.659832] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.660034] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} 
[ 838.660303] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 838.660496] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 838.660655] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 838.660818] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 838.660993] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 838.661749] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 982936be-3cb1-4930-b135-8fc2019c5216 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 838.663603] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02056839-57b3-4766-969e-e3d623873a12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.674057] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87deb88-f202-4731-80bd-48bd780397ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.784208] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831934, 'name': Rename_Task, 'duration_secs': 0.292145} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.784208] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 838.784208] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-235b08fd-a4a9-4565-996c-ed9ad14d2fb9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.794408] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 838.794408] env[62914]: value = "task-4831936" [ 838.794408] env[62914]: _type = "Task" [ 838.794408] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.803873] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.850829] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831935, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.948277] env[62914]: INFO nova.compute.manager [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Took 64.06 seconds to build instance. [ 838.975097] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.977084] env[62914]: DEBUG nova.compute.manager [req-ab9d2aa1-90bc-43ee-a138-c38b8ac5c422 req-a75f48bf-28d6-4c53-855e-32129c16f063 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Received event network-vif-plugged-5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 838.977352] env[62914]: DEBUG oslo_concurrency.lockutils [req-ab9d2aa1-90bc-43ee-a138-c38b8ac5c422 req-a75f48bf-28d6-4c53-855e-32129c16f063 service nova] Acquiring lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.977648] env[62914]: DEBUG oslo_concurrency.lockutils [req-ab9d2aa1-90bc-43ee-a138-c38b8ac5c422 req-a75f48bf-28d6-4c53-855e-32129c16f063 service nova] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.977762] env[62914]: DEBUG oslo_concurrency.lockutils [req-ab9d2aa1-90bc-43ee-a138-c38b8ac5c422 req-a75f48bf-28d6-4c53-855e-32129c16f063 service nova] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.977903] env[62914]: DEBUG nova.compute.manager [req-ab9d2aa1-90bc-43ee-a138-c38b8ac5c422 req-a75f48bf-28d6-4c53-855e-32129c16f063 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] No waiting events found dispatching network-vif-plugged-5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.978227] env[62914]: WARNING nova.compute.manager [req-ab9d2aa1-90bc-43ee-a138-c38b8ac5c422 req-a75f48bf-28d6-4c53-855e-32129c16f063 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Received unexpected event network-vif-plugged-5318eee4-4de3-4b6a-acef-6991fa42dabc for instance with vm_state building and task_state spawning. [ 839.158036] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5262d0b7-7ba6-967d-8510-a185f353a828, 'name': SearchDatastore_Task, 'duration_secs': 0.01628} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.158503] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.158758] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.159015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.159184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.159386] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.159659] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac8f1479-751b-405e-8c98-7f8106c92ec9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.170318] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance c488ba7b-68cc-4876-934f-a11d33fca6ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 839.174265] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.174435] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 839.175231] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df3ef965-c4e0-4c72-b1a6-d31fb124ba1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.184151] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 839.184151] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c3d65-bee5-6def-c196-a08bc5d42e33" [ 839.184151] env[62914]: _type = "Task" [ 839.184151] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.196719] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c3d65-bee5-6def-c196-a08bc5d42e33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.292258] env[62914]: DEBUG nova.network.neutron [-] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.305472] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831936, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.352025] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831935, 'name': CreateVM_Task, 'duration_secs': 0.635514} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.352025] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 839.352356] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.352599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.352997] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.353400] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e20b20b-605c-4f14-8c68-fd1a94de235e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.359282] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 839.359282] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52272edb-4930-2a26-fe87-87f72e7f9e31" [ 839.359282] env[62914]: _type = "Task" [ 839.359282] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.368888] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52272edb-4930-2a26-fe87-87f72e7f9e31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.386510] env[62914]: DEBUG nova.network.neutron [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updated VIF entry in instance network info cache for port 705a7360-47b1-4951-92f7-277ca049efa1. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 839.386510] env[62914]: DEBUG nova.network.neutron [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [{"id": "705a7360-47b1-4951-92f7-277ca049efa1", "address": "fa:16:3e:be:78:78", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705a7360-47", "ovs_interfaceid": "705a7360-47b1-4951-92f7-277ca049efa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.409218] env[62914]: DEBUG nova.compute.manager [req-12bfe478-4ef4-4388-ac55-06275c6b61e0 req-63cb720a-fc58-405a-979e-58666555dd81 service nova] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Received event network-vif-deleted-141c4f39-6bf0-48ce-b82e-4a8d6a3a0cb5 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 839.454499] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3221c0d0-77b9-4fbe-845f-3fd145abc4e0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "10102941-c31a-4ab1-be5a-801520d49fd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.363s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.477156] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.510329] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Successfully updated port: 5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.671430] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 12e8b0ac-0dec-4928-ae65-ab53992ecab5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 839.700775] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c3d65-bee5-6def-c196-a08bc5d42e33, 'name': SearchDatastore_Task, 'duration_secs': 0.015607} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.702834] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a49d41c4-c6a1-4c00-bf09-5ceee5ee1b21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.715593] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 839.715593] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c305d0-2282-60fc-1547-4304add601e8" [ 839.715593] env[62914]: _type = "Task" [ 839.715593] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.731313] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c305d0-2282-60fc-1547-4304add601e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.795076] env[62914]: INFO nova.compute.manager [-] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Took 1.37 seconds to deallocate network for instance. [ 839.812106] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831936, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.871599] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52272edb-4930-2a26-fe87-87f72e7f9e31, 'name': SearchDatastore_Task, 'duration_secs': 0.053019} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.872031] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.872287] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.872500] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.888823] env[62914]: DEBUG oslo_concurrency.lockutils [req-01933ff2-9f59-4161-851a-dd1720a13d56 req-6f976bad-fdea-44bb-830e-636a204550c8 service nova] Releasing lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.957032] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 839.977507] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.014657] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "refresh_cache-bd81fcb7-abef-4b86-8dce-f07b1c226f2f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.014657] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "refresh_cache-bd81fcb7-abef-4b86-8dce-f07b1c226f2f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.015164] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.174792] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance ea06d3c3-d836-4e66-ac66-42f9886cd5de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 840.229995] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c305d0-2282-60fc-1547-4304add601e8, 'name': SearchDatastore_Task, 'duration_secs': 0.015821} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.230410] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.230867] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c/557c0538-fc4a-403a-a9cb-b706e2260b1c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 840.231599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.231759] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.232407] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76d6b990-6967-4acb-82a0-3ce98c1b64c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.235158] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25d665a2-a494-474c-a8ce-4050aa0b3819 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.243707] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 840.243707] env[62914]: value = "task-4831937" [ 840.243707] env[62914]: _type = "Task" [ 840.243707] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.248785] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.248885] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 840.250371] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba1abfb-fe84-48e1-8a96-99a2f0450d5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.257837] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.264417] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 840.264417] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a8187a-1332-20f6-471b-d72977dd91e5" [ 840.264417] env[62914]: _type = "Task" [ 840.264417] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.274148] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a8187a-1332-20f6-471b-d72977dd91e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.310495] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.310973] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831936, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.477876] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.481434] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.565937] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.678746] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 840.743596] env[62914]: DEBUG nova.compute.manager [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Received event network-changed-5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 840.743885] env[62914]: DEBUG nova.compute.manager [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Refreshing instance network info cache due to event network-changed-5318eee4-4de3-4b6a-acef-6991fa42dabc. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 840.744151] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] Acquiring lock "refresh_cache-bd81fcb7-abef-4b86-8dce-f07b1c226f2f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.756545] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831937, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.777536] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a8187a-1332-20f6-471b-d72977dd91e5, 'name': SearchDatastore_Task, 'duration_secs': 0.017729} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.779182] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Updating instance_info_cache with network_info: [{"id": "5318eee4-4de3-4b6a-acef-6991fa42dabc", "address": "fa:16:3e:29:de:af", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5318eee4-4d", "ovs_interfaceid": "5318eee4-4de3-4b6a-acef-6991fa42dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.782638] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08d3e9ae-7b46-4185-add5-4168f886eff9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.792034] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 840.792034] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52daeaf6-1543-96a3-da77-e267f97b7a41" [ 840.792034] env[62914]: _type = "Task" [ 840.792034] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.804403] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52daeaf6-1543-96a3-da77-e267f97b7a41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.812437] env[62914]: DEBUG oslo_vmware.api [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4831936, 'name': PowerOnVM_Task, 'duration_secs': 1.906675} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.812841] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 840.814169] env[62914]: INFO nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Took 10.42 seconds to spawn the instance on the hypervisor. [ 840.814169] env[62914]: DEBUG nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 840.814169] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520b02ca-aa57-472f-9843-9d707d373cb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.978772] env[62914]: DEBUG oslo_vmware.api [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831924, 'name': ReconfigVM_Task, 'duration_secs': 5.859142} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.979135] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.979315] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Reconfigured VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 841.183990] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance cca4bbf9-8864-4805-b95e-954e6b570eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 841.258879] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831937, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.289029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "refresh_cache-bd81fcb7-abef-4b86-8dce-f07b1c226f2f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.289029] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Instance network_info: |[{"id": "5318eee4-4de3-4b6a-acef-6991fa42dabc", "address": "fa:16:3e:29:de:af", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5318eee4-4d", "ovs_interfaceid": "5318eee4-4de3-4b6a-acef-6991fa42dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 841.289029] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] Acquired lock "refresh_cache-bd81fcb7-abef-4b86-8dce-f07b1c226f2f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.289029] env[62914]: DEBUG nova.network.neutron [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Refreshing network info cache for port 5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.289029] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:de:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5318eee4-4de3-4b6a-acef-6991fa42dabc', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.302116] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 
tempest-MultipleCreateTestJSON-1768884473-project-member] Creating folder: Project (1873cee9895d48cb97914fd7ca8392a0). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 841.302974] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc5b59e9-1ac7-4080-ac50-53f5f10b843f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.320415] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52daeaf6-1543-96a3-da77-e267f97b7a41, 'name': SearchDatastore_Task, 'duration_secs': 0.091953} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.320415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.320415] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 397c5401-a435-4170-b07d-a03488c73867/397c5401-a435-4170-b07d-a03488c73867.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 841.320607] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f0e81a5-96fa-4d5a-a192-7da5162b008c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.331629] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Created folder: Project (1873cee9895d48cb97914fd7ca8392a0) in parent group-v941773. [ 841.331838] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Creating folder: Instances. Parent ref: group-v941951. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 841.336251] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed52b334-3200-4efa-802a-668875fd5d3e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.340503] env[62914]: INFO nova.compute.manager [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Took 65.75 seconds to build instance. 
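Editor's note: the DEBUG stream above repeats one pattern throughout — an oslo.vmware call (SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task, PowerOnVM_Task, ReconfigVM_Task) returns a task handle, and wait_for_task/_poll_task polls it, logging "progress is N%" until "completed successfully." appears. The sketch below is a minimal, hypothetical re-creation of that polling loop in plain Python, written only to make the log pattern easier to follow; TaskInfo, fetch_task_info, and the poll interval are illustrative assumptions, not the actual oslo.vmware implementation.

    # Minimal sketch of the task-polling pattern visible in the log above.
    # All names here (TaskInfo, fetch_task_info) are hypothetical stand-ins,
    # not the real oslo.vmware API.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # "running", "success" or "error"
        progress: int            # percentage, as logged ("progress is 33%")
        error: str | None = None

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the _poll_task lines above."""
        while True:
            info = fetch_task_info()                 # one poll of the task state
            if info.state == "running":
                print(f"progress is {info.progress}%")   # matches the DEBUG output
                time.sleep(poll_interval)
                continue
            if info.state == "success":
                return info                          # "completed successfully."
            raise RuntimeError(info.error or "task failed")

    # usage: simulate a CopyVirtualDisk_Task that finishes on the third poll
    states = iter([TaskInfo("running", 0), TaskInfo("running", 77), TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0.0)

Under this reading, each "Waiting for the task: (returnval){ value = ... _type = "Task" }" block in the log is the handle being handed to the poller, and the subsequent "progress is N%" / "duration_secs" lines are successive iterations of a loop like the one sketched here.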
[ 841.343487] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 841.343487] env[62914]: value = "task-4831939" [ 841.343487] env[62914]: _type = "Task" [ 841.343487] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.354500] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.356097] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Created folder: Instances in parent group-v941951. [ 841.356361] env[62914]: DEBUG oslo.service.loopingcall [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.356567] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 841.356789] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f23d5d84-34d6-44d8-8579-e47fb927ae2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.378425] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.378425] env[62914]: value = "task-4831941" [ 841.378425] env[62914]: _type = "Task" [ 841.378425] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.386873] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831941, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.686305] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 455965de-816d-4ab2-9d5e-a12b06893e6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 841.756750] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831937, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.156555} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.757072] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c/557c0538-fc4a-403a-a9cb-b706e2260b1c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 841.757295] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.757559] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56d12a5b-6cf8-4e74-b9c0-770a63d7015b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.765599] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 841.765599] env[62914]: value = "task-4831942" [ 841.765599] env[62914]: _type = "Task" [ 841.765599] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.777232] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.847288] env[62914]: DEBUG oslo_concurrency.lockutils [None req-75c72e08-4241-49b2-a878-b89e661cf35c tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "4911baea-15df-46db-be11-fcf998eb0cb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.733s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.861698] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.894061] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831941, 'name': CreateVM_Task, 'duration_secs': 0.404247} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.894991] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 841.894991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.895211] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.895540] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.895809] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54077ffc-b562-4062-b20d-8d21df644a54 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.902067] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 841.902067] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52de6f15-67b1-4a20-4c59-04236e300994" [ 841.902067] env[62914]: _type = "Task" [ 841.902067] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.913333] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52de6f15-67b1-4a20-4c59-04236e300994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.032420] env[62914]: DEBUG nova.network.neutron [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Updated VIF entry in instance network info cache for port 5318eee4-4de3-4b6a-acef-6991fa42dabc. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 842.032807] env[62914]: DEBUG nova.network.neutron [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Updating instance_info_cache with network_info: [{"id": "5318eee4-4de3-4b6a-acef-6991fa42dabc", "address": "fa:16:3e:29:de:af", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5318eee4-4d", "ovs_interfaceid": "5318eee4-4de3-4b6a-acef-6991fa42dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.189492] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 842.281025] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.327209} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.281025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.281580] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc9aa56-4fb1-47ce-adf8-bf37d259ef0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.315118] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c/557c0538-fc4a-403a-a9cb-b706e2260b1c.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.315603] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bbfe7a4-ac34-49ba-a1c0-5db2267327e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.337368] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.337619] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.337808] env[62914]: DEBUG nova.network.neutron [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.339332] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 842.339332] env[62914]: value = "task-4831943" [ 842.339332] env[62914]: _type = "Task" [ 842.339332] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.355476] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.355828] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "2d48056c-d38f-4be1-b28b-71da14607870" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.356117] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.356359] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.356620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "2d48056c-d38f-4be1-b28b-71da14607870-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.359040] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 842.361793] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831943, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.362698] env[62914]: INFO nova.compute.manager [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Terminating instance [ 842.366351] env[62914]: DEBUG nova.compute.manager [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 842.366475] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 842.367934] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4bbf81-f651-4c2a-a9db-e73528e6dbf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.374527] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831939, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.379703] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 842.380019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1faccdee-a7ca-443c-ad39-b1acefd74c96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.386481] env[62914]: DEBUG oslo_vmware.api [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 842.386481] env[62914]: value = "task-4831944" [ 842.386481] env[62914]: _type = "Task" [ 842.386481] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.413081] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52de6f15-67b1-4a20-4c59-04236e300994, 'name': SearchDatastore_Task, 'duration_secs': 0.042504} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.413482] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.413746] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.413995] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.414308] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.414556] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.414671] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c756cea-81c4-4fa8-9117-309def9d97d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.430604] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.430917] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 842.432998] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-479f9753-de31-4588-8889-c036f7f612c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.443114] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 842.443114] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52735049-3723-34c2-8671-9744ccb16e7a" [ 842.443114] env[62914]: _type = "Task" [ 842.443114] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.458515] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52735049-3723-34c2-8671-9744ccb16e7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.535908] env[62914]: DEBUG oslo_concurrency.lockutils [req-9bd7edda-4701-488c-af8b-be5a7fa24635 req-f938f608-4cfd-4fd0-a573-7290d574b555 service nova] Releasing lock "refresh_cache-bd81fcb7-abef-4b86-8dce-f07b1c226f2f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.693151] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 83de3d7c-2308-4678-ae90-a30705f6a8c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 842.852081] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831943, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.868034] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831939, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.884616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.901518] env[62914]: DEBUG oslo_vmware.api [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831944, 'name': PowerOffVM_Task, 'duration_secs': 0.406862} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.903887] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 842.904120] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 842.905484] env[62914]: DEBUG nova.compute.manager [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-vif-deleted-55d78b2e-b665-4a1c-84fe-47e02f937395 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 842.905681] env[62914]: INFO nova.compute.manager [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Neutron deleted interface 55d78b2e-b665-4a1c-84fe-47e02f937395; detaching it from the instance and deleting it from the info cache [ 842.905970] env[62914]: DEBUG nova.network.neutron [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", 
"segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0acbfa32-adde-4a6e-bfb6-c745a0b8524a", "address": "fa:16:3e:72:6c:64", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0acbfa32-ad", "ovs_interfaceid": "0acbfa32-adde-4a6e-bfb6-c745a0b8524a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.907667] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a83380e4-88db-4ac4-83bc-1318a95def3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.958341] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52735049-3723-34c2-8671-9744ccb16e7a, 'name': SearchDatastore_Task, 'duration_secs': 0.030697} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.959580] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90438ff6-c441-4dae-ab51-e62d37369420 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.967056] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 842.967056] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529e23e4-662a-6ac9-9755-a16093dd49fb" [ 842.967056] env[62914]: _type = "Task" [ 842.967056] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.981028] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529e23e4-662a-6ac9-9755-a16093dd49fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.034215] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.034477] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.034682] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleting the datastore file [datastore2] 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.034992] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ed75fe9-cea6-46d6-af37-3180988132e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.047875] env[62914]: DEBUG oslo_vmware.api [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 843.047875] env[62914]: value = "task-4831946" [ 843.047875] env[62914]: _type = "Task" [ 843.047875] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.058465] env[62914]: DEBUG oslo_vmware.api [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.166572] env[62914]: INFO nova.network.neutron [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Port 55d78b2e-b665-4a1c-84fe-47e02f937395 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 843.167040] env[62914]: INFO nova.network.neutron [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Port 0acbfa32-adde-4a6e-bfb6-c745a0b8524a from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
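The "Acquiring lock ... / acquired ... / released ..." records above come from oslo.concurrency's lockutils wrappers around Nova's cache-refresh and resource-claim paths. A minimal sketch of the pattern that produces those lines follows; the lock names are placeholders, not the exact names Nova constructs.

from oslo_concurrency import lockutils

@lockutils.synchronized('refresh_cache-<instance-uuid>')
def refresh_network_cache():
    # The body runs only while the named lock is held; entering and
    # leaving the wrapper is what emits the "acquired"/"released" lines.
    pass

def claim_resources():
    # The same helper also works as a context manager.
    with lockutils.lock('compute_resources'):
        pass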
[ 843.167145] env[62914]: DEBUG nova.network.neutron [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.197736] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance af141439-1c36-4184-9775-d1e30ee77ddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 843.200630] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "55192659-4d65-4e74-a47f-46d650b6b095" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.200896] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "55192659-4d65-4e74-a47f-46d650b6b095" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.351979] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831943, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.363816] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831939, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.763409} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.364450] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 397c5401-a435-4170-b07d-a03488c73867/397c5401-a435-4170-b07d-a03488c73867.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 843.364991] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.365579] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a46c0671-572d-4e69-867c-7181d459facb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.377039] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 843.377039] env[62914]: value = "task-4831947" [ 843.377039] env[62914]: _type = "Task" [ 843.377039] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.391441] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831947, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.408483] env[62914]: DEBUG oslo_concurrency.lockutils [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] Acquiring lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.479626] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529e23e4-662a-6ac9-9755-a16093dd49fb, 'name': SearchDatastore_Task, 'duration_secs': 0.017974} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.480053] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.480420] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bd81fcb7-abef-4b86-8dce-f07b1c226f2f/bd81fcb7-abef-4b86-8dce-f07b1c226f2f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 843.480715] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af3eb7d0-663c-4690-b4d7-d9629449b5a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.488304] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 843.488304] env[62914]: value = "task-4831948" [ 843.488304] env[62914]: _type = "Task" [ 843.488304] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.497028] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.557108] env[62914]: DEBUG oslo_vmware.api [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4831946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317936} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.557403] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.557629] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 843.557817] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.557998] env[62914]: INFO nova.compute.manager [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Took 1.19 seconds to destroy the instance on the hypervisor. [ 843.558333] env[62914]: DEBUG oslo.service.loopingcall [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.558559] env[62914]: DEBUG nova.compute.manager [-] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 843.558659] env[62914]: DEBUG nova.network.neutron [-] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.622614] env[62914]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 0acbfa32-adde-4a6e-bfb6-c745a0b8524a could not be found.", "detail": ""}} {{(pid=62914) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 843.622614] env[62914]: DEBUG nova.network.neutron [-] Unable to show port 0acbfa32-adde-4a6e-bfb6-c745a0b8524a as it no longer exists. 
{{(pid=62914) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 843.670065] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.704040] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 43227b1e-c90a-47d0-a4f5-fd0af0826e94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 843.704040] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 843.704040] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4608MB phys_disk=100GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '18', 'num_vm_active': '11', 'num_task_None': '12', 'num_os_type_None': '18', 'num_proj_8e2b3db08ee34716be135d72b3ddda8d': '2', 'io_workload': '5', 'num_vm_resized': '2', 'num_proj_894c73ea90624428afeb1165afbbfa9c': '1', 'num_task_resize_reverting': '1', 'num_proj_7cda9ee54ad14f479838a54276dac349': '1', 'num_proj_23ba9ece80a24353ac072b643cb16df7': '2', 'num_proj_2562164f04b045a59b3b501d2b0014ec': '1', 'num_proj_319610053c8a4ca19dcb0c0b3e6b6596': '3', 'num_proj_b19293a423174c20963c000441db100e': '1', 'num_proj_d271710592bf47b79e16552221fe7107': '1', 'num_proj_adf406f1352240aba2338e64b8f182b4': '1', 'num_task_deleting': '1', 'num_proj_15573bba5e5448498fde03c18c64f4e4': '1', 'num_vm_building': '5', 'num_task_spawning': '4', 'num_proj_5adc4dc554ed4fe69f214161fd8ab9b9': '2', 'num_proj_14ea39ac6e2d400ca89bbffc20d764ef': '1', 'num_proj_1873cee9895d48cb97914fd7ca8392a0': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 843.851674] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831943, 'name': ReconfigVM_Task, 'duration_secs': 1.237887} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.854889] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c/557c0538-fc4a-403a-a9cb-b706e2260b1c.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.855880] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee46819c-22e6-4f03-a0b0-e32b05fa89ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.864522] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 843.864522] env[62914]: value = "task-4831949" [ 843.864522] env[62914]: _type = "Task" [ 843.864522] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.878040] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831949, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.893415] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831947, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10956} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.893415] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.893843] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead38c0e-bcc0-4907-8bc3-21f6f19252ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.922104] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 397c5401-a435-4170-b07d-a03488c73867/397c5401-a435-4170-b07d-a03488c73867.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.925683] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-668050cd-0ac3-4f64-9b73-0284a1be8f95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.948541] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 843.948541] env[62914]: value = "task-4831950" [ 843.948541] env[62914]: _type = "Task" [ 843.948541] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.964872] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831950, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.002755] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831948, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.175269] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4356f038-efc5-41c9-b3ea-b4f887c88cf0 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-2d48056c-d38f-4be1-b28b-71da14607870-55d78b2e-b665-4a1c-84fe-47e02f937395" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.818s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.352828] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3860c8b6-6e84-4d81-9fad-9e608a8f19c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.361946] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a61e8f9-8dec-485e-a6fc-02a78b118e9e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.374878] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831949, 'name': Rename_Task, 'duration_secs': 0.408022} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.401726] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 844.402238] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d99626a-d329-4792-b716-44516a426962 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.405143] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9703c79a-c335-4e04-aadb-7477e58e7f4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.416262] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f39f649-4747-4a3a-8504-5facb165e0f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.421565] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 844.421565] env[62914]: value = "task-4831951" [ 844.421565] env[62914]: _type = "Task" [ 844.421565] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.435027] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.444079] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831951, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.459028] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.503034] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.895625} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.503034] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bd81fcb7-abef-4b86-8dce-f07b1c226f2f/bd81fcb7-abef-4b86-8dce-f07b1c226f2f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 844.503034] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.503034] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f075434a-6eb6-44f5-97c8-87bae4f103d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.510454] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 844.510454] env[62914]: value = "task-4831952" [ 844.510454] env[62914]: _type = "Task" [ 844.510454] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.521478] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831952, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.623025] env[62914]: DEBUG nova.network.neutron [-] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.932517] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831951, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.934683] env[62914]: DEBUG nova.compute.manager [req-93cf8b43-d3db-460c-96b3-24119b2cc3cc req-48192299-c8e1-4e4d-82f8-71f26ee07222 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-vif-deleted-06e36426-302a-4bcd-bb7a-f9d6dd3a72c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 844.940019] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.961473] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.021067] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831952, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064934} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.021407] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.022180] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d70ec1-032d-4abb-811a-17f481738e49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.050527] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] bd81fcb7-abef-4b86-8dce-f07b1c226f2f/bd81fcb7-abef-4b86-8dce-f07b1c226f2f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.050963] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97fdc1ce-79cc-4344-a6cf-7e65a1642d46 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.081312] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 845.081312] env[62914]: value = "task-4831953" [ 845.081312] env[62914]: _type = "Task" [ 845.081312] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.094423] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831953, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.127226] env[62914]: INFO nova.compute.manager [-] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Took 1.57 seconds to deallocate network for instance. 
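The "Inventory has not changed ..." records above quote the provider inventory the resource tracker reports to Placement. A small worked example using those same figures, assuming the usual Placement capacity rule of (total - reserved) * allocation_ratio (the rule is an assumption here, not something this log states):

# Figures copied from the inventory record above; the capacity formula is assumed.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g}")

# Prints 192 VCPU, 196078 MEMORY_MB and 200 DISK_GB, which the 20 allocated
# vCPUs and 4608 MB used_ram in the earlier "Final resource view" record sit
# comfortably within.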
[ 846.168146] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 846.168408] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.593s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.169726] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.170053] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.373s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.170267] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.172351] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 40.744s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.174124] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.174276] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Cleaning up deleted instances {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 846.190266] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831950, 'name': ReconfigVM_Task, 'duration_secs': 1.158139} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.190607] env[62914]: DEBUG oslo_vmware.api [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4831951, 'name': PowerOnVM_Task, 'duration_secs': 1.442636} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.193563] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 397c5401-a435-4170-b07d-a03488c73867/397c5401-a435-4170-b07d-a03488c73867.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.194306] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 846.194475] env[62914]: INFO nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Took 13.12 seconds to spawn the instance on the hypervisor. [ 846.194661] env[62914]: DEBUG nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 846.194942] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831953, 'name': ReconfigVM_Task, 'duration_secs': 1.07588} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.195812] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0627f288-2038-427b-ba09-a4cac3ea231b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.197878] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07e264d-a854-4b1f-8e2f-dadca93f2d89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.200714] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Reconfigured VM instance instance-0000003f to attach disk [datastore2] bd81fcb7-abef-4b86-8dce-f07b1c226f2f/bd81fcb7-abef-4b86-8dce-f07b1c226f2f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.201608] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e33c9e6c-3841-4888-b4ab-b85d1b193dc2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.212624] env[62914]: INFO nova.scheduler.client.report [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Deleted allocations for instance 76dfbf82-0ed0-4621-890c-060b187b47e0 [ 846.214841] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 846.214841] env[62914]: value = "task-4831955" [ 846.214841] env[62914]: _type = "Task" [ 846.214841] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.215117] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 846.215117] env[62914]: value = "task-4831954" [ 846.215117] env[62914]: _type = "Task" [ 846.215117] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.230847] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831955, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.234949] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831954, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.674935] env[62914]: DEBUG nova.objects.instance [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lazy-loading 'migration_context' on Instance uuid 4cea2bd1-a238-4fb6-bc47-719894461228 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.685481] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] There are 40 instances to clean {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11313}} [ 846.685744] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 968cbfbe-1570-48d6-890d-c7a680855574] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 846.737872] env[62914]: INFO nova.compute.manager [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Took 68.43 seconds to build instance. [ 846.739268] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2f2f42e1-02e7-4a6c-bd75-fa16b3ea66f1 tempest-ServersV294TestFqdnHostnames-1806071593 tempest-ServersV294TestFqdnHostnames-1806071593-project-member] Lock "76dfbf82-0ed0-4621-890c-060b187b47e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.603s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.744289] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831955, 'name': Rename_Task, 'duration_secs': 0.153026} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.747331] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 846.747904] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831954, 'name': Rename_Task, 'duration_secs': 0.2608} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.751241] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db534970-7030-4a6d-879e-9be23b9a7640 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.752959] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 846.754212] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82eda633-4624-417b-8d6f-ca519adcd196 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.762227] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 846.762227] env[62914]: value = "task-4831956" [ 846.762227] env[62914]: _type = "Task" [ 846.762227] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.764021] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 846.764021] env[62914]: value = "task-4831957" [ 846.764021] env[62914]: _type = "Task" [ 846.764021] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.779498] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.784093] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831957, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.197208] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: bc6da94e-4de8-4e56-a071-d04c5e5dad18] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 847.225775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ca36ac-bf04-4077-a92e-347964b3d81c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.240678] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b51aae01-f294-48ac-b01c-7e03cdf44b3a tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.635s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.244401] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe4b597-f85e-402b-a0b1-9b2d748b97bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.287820] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992262dd-d212-488b-badd-4d9de824cbd3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.299081] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831957, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.307018] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b216b5aa-c3d0-41da-844e-db9e07f5bf4d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.307827] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831956, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.319933] env[62914]: DEBUG nova.compute.provider_tree [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.702138] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e6544702-bde7-4056-8a50-adede5c6a9d6] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 847.749253] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 847.797753] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831956, 'name': PowerOnVM_Task, 'duration_secs': 0.90622} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.801097] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 847.801423] env[62914]: INFO nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Took 9.18 seconds to spawn the instance on the hypervisor. [ 847.801620] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 847.801926] env[62914]: DEBUG oslo_vmware.api [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831957, 'name': PowerOnVM_Task, 'duration_secs': 0.797798} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.802969] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95018ece-73e2-4b8b-91fb-9fca3fe38eff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.805681] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 847.805901] env[62914]: INFO nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Took 11.91 seconds to spawn the instance on the hypervisor. [ 847.806097] env[62914]: DEBUG nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 847.806846] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df04987d-09db-4384-9d95-6c21f3509f27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.823795] env[62914]: DEBUG nova.scheduler.client.report [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.211730] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 8b83f82b-42f7-4f33-abc4-ff278d343309] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 848.267923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.335146] env[62914]: INFO nova.compute.manager [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Took 64.01 seconds to build instance. 
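The records above trace the vCenter task lifecycle the driver relies on: a Rename_Task or PowerOnVM_Task is submitted, the API layer logs "Waiting for the task ... to complete", intermediate polls report progress (0%, 66%, 89%, ...), and the final poll logs "completed successfully" with a duration. A minimal, self-contained sketch of that poll-until-done loop follows; it is an illustration of the pattern only, not the oslo_vmware implementation, and poll_task_info, TaskInfo, and the interval/timeout values are hypothetical stand-ins.

    # Sketch of a generic poll-until-done loop, mirroring the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # pattern in the log above. All names here are illustrative stand-ins.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # e.g. "running", "success", "error"
        progress: int         # 0-100
        error: str | None = None

    def wait_for_task(poll_task_info, task_id, interval=0.5, timeout=120.0):
        """Poll task_id via poll_task_info(task_id) until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while True:
            info = poll_task_info(task_id)      # one round trip per poll
            if info.state == "success":
                return info                     # corresponds to "completed successfully"
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            if time.monotonic() > deadline:
                raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
            # Intermediate polls are what produce the "progress is N%" lines.
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(interval)

The real driver additionally serializes session use and logs each poll through the _poll_task hook seen in the trailers; the sketch only captures the terminate-on-success/error/timeout control flow.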
[ 848.339892] env[62914]: INFO nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Took 57.19 seconds to build instance. [ 848.715318] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1342d15d-fbef-4709-adf6-f827bc13d3ca] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 848.839537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-44fd8089-175b-4929-b3bf-9f79f91ddc38 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.380s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.843557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.670s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.854330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 41.377s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.858656] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.849s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.033190] env[62914]: DEBUG nova.compute.manager [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 849.219594] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 5a704020-921e-4ede-9fd9-b745c027a158] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 849.366926] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 849.371203] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 849.556137] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.725164] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 4496a977-30b2-4323-a561-884633958cdf] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 849.831557] env[62914]: DEBUG nova.compute.manager [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 849.831744] env[62914]: DEBUG nova.compute.manager [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing instance network info cache due to event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 849.831975] env[62914]: DEBUG oslo_concurrency.lockutils [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] Acquiring lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.832160] env[62914]: DEBUG oslo_concurrency.lockutils [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] Acquired lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.832317] env[62914]: DEBUG nova.network.neutron [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 849.912425] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.913771] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.038072] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f81095a-d051-40a3-95f6-0b8ae6b5482d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.046626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862dec9f-190b-44d7-9169-2b21ab1fd3b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.086498] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c3b9d0-96c1-4669-8d4b-7c405e6257bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.096448] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2bd980-c6fd-4a31-93ee-cee42b1eed16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.114513] env[62914]: DEBUG nova.compute.provider_tree [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.231560] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: bf2e9634-66ee-4b6a-a148-bc77420d793f] 
Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 850.408339] env[62914]: INFO nova.compute.manager [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Swapping old allocation on dict_keys(['f2f7a014-852b-4b37-9610-c5761f4b0175']) held by migration 8e4a4d60-6c1d-42ca-b081-c15b4d2a896c for instance [ 850.436358] env[62914]: DEBUG nova.scheduler.client.report [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Overwriting current allocation {'allocations': {'f2f7a014-852b-4b37-9610-c5761f4b0175': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 94}}, 'project_id': '8e2b3db08ee34716be135d72b3ddda8d', 'user_id': 'dda0f12511324c52b00236c75b33acc6', 'consumer_generation': 1} on consumer 4cea2bd1-a238-4fb6-bc47-719894461228 {{(pid=62914) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 850.547867] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.548332] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquired lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.548332] env[62914]: DEBUG nova.network.neutron [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 850.618493] env[62914]: DEBUG nova.scheduler.client.report [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.625459] env[62914]: DEBUG nova.network.neutron [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updated VIF entry in instance network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 850.625826] env[62914]: DEBUG nova.network.neutron [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.735183] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: cead3557-080d-4956-a957-cac449bb69f6] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 851.130869] env[62914]: DEBUG oslo_concurrency.lockutils [req-8f16dff8-255f-460b-96ae-5bb9f6806d27 req-cc5d9850-cf4c-4b20-a583-478340806dc1 service nova] Releasing lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.240333] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 12aa02f0-a232-427a-80ba-1faa12c4d43a] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 851.313748] env[62914]: DEBUG nova.network.neutron [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [{"id": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "address": "fa:16:3e:88:52:2b", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc148a862-a6", "ovs_interfaceid": "c148a862-a6a8-4c52-b1df-8e764ee00e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.632439] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.779s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.632670] env[62914]: DEBUG nova.compute.manager [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=62914) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 851.636555] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.916s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.637930] env[62914]: INFO nova.compute.claims [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.644025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "bdec185e-2af7-4379-8c67-03e125750bb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.644025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "bdec185e-2af7-4379-8c67-03e125750bb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.742713] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: fed831e0-4518-4025-89b1-7f6b644e013d] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 851.819019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 
tempest-MigrationsAdminTest-16747298-project-member] Releasing lock "refresh_cache-4cea2bd1-a238-4fb6-bc47-719894461228" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.819019] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 851.819019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b44304cc-9468-407e-8814-48900e13b7d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.828247] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 851.828247] env[62914]: value = "task-4831958" [ 851.828247] env[62914]: _type = "Task" [ 851.828247] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.838691] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.215151] env[62914]: INFO nova.scheduler.client.report [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted allocation for migration 5affa2b3-57aa-4caf-b07c-b4616c9bb3c4 [ 852.246096] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 54185b06-7ccb-4740-a6ee-213bbfa6365b] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 852.278428] env[62914]: DEBUG nova.compute.manager [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 852.278717] env[62914]: DEBUG nova.compute.manager [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing instance network info cache due to event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 852.279551] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] Acquiring lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.279551] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] Acquired lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.279551] env[62914]: DEBUG nova.network.neutron [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 852.336871] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831958, 'name': PowerOffVM_Task, 'duration_secs': 0.44427} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.337188] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 852.337856] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:24:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd1b046e-6be2-4ac8-bbb2-0adf61fb18f6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1771667993',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 852.338121] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 852.338305] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.338497] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 
tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 852.338647] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.338799] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 852.339015] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 852.339184] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 852.339381] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 852.339567] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 852.339740] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 852.344940] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6e76279-f6fb-42e9-bf98-f38563d12d70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.362907] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 852.362907] env[62914]: value = "task-4831959" [ 852.362907] env[62914]: _type = "Task" [ 852.362907] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.373267] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831959, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.724422] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70a41024-8ac6-409f-add2-a6a68f55d58c tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 48.166s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.751628] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 567f3d61-ed30-47d9-aebc-77c9392be506] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 852.875687] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831959, 'name': ReconfigVM_Task, 'duration_secs': 0.276386} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.880064] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13659e17-4ac6-4581-aa7c-9fc4d2223cb4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.901815] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:24:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd1b046e-6be2-4ac8-bbb2-0adf61fb18f6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1771667993',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 852.902164] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 852.902386] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.902662] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 852.902918] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Image 
pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.902968] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 852.903197] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 852.903370] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 852.903549] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 852.903722] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 852.903911] env[62914]: DEBUG nova.virt.hardware [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 852.909993] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-198f448a-7c82-4261-8418-9818396f24be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.917567] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 852.917567] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5249979c-a3b2-5ed3-eb21-707fa7175005" [ 852.917567] env[62914]: _type = "Task" [ 852.917567] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.926313] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5249979c-a3b2-5ed3-eb21-707fa7175005, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.015656] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "769c3873-7480-47de-894b-40dbf3f2f7f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.016145] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "769c3873-7480-47de-894b-40dbf3f2f7f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.061919] env[62914]: DEBUG nova.network.neutron [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updated VIF entry in instance network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 853.062298] env[62914]: DEBUG nova.network.neutron [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.215041] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a939f9-f2e1-4912-abe4-8bfc287e7b09 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.224745] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f12c64-598d-445d-984e-c2a6856580b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.259222] env[62914]: DEBUG nova.compute.manager [None 
req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 5bba4aa5-2b92-42b4-8516-72298a99f0e6] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 853.262719] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706ae658-788c-48d7-a24a-ec53c938a835 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.271664] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f4fab3-8367-447c-813e-10f4faa57f40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.287024] env[62914]: DEBUG nova.compute.provider_tree [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.429349] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5249979c-a3b2-5ed3-eb21-707fa7175005, 'name': SearchDatastore_Task, 'duration_secs': 0.011026} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.434809] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 853.435110] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91e8a775-2e07-4ced-9f6e-142455c9ea56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.454422] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 853.454422] env[62914]: value = "task-4831960" [ 853.454422] env[62914]: _type = "Task" [ 853.454422] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.462941] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831960, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.553523] env[62914]: DEBUG nova.objects.instance [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'flavor' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.565635] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] Releasing lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.565919] env[62914]: DEBUG nova.compute.manager [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Received event network-changed-705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 853.566107] env[62914]: DEBUG nova.compute.manager [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Refreshing instance network info cache due to event network-changed-705a7360-47b1-4951-92f7-277ca049efa1. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 853.566333] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] Acquiring lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.566690] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] Acquired lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.566903] env[62914]: DEBUG nova.network.neutron [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Refreshing network info cache for port 705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 853.762722] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 4fbb08f0-6712-4e78-b9da-b33a812ec9b7] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 853.790149] env[62914]: DEBUG nova.scheduler.client.report [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.967263] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831960, 'name': ReconfigVM_Task, 'duration_secs': 0.2826} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.967591] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 853.968495] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7e60d9-a761-4cbb-b331-4bc997b3219a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.993069] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.993450] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f11200e-8a0c-4b9c-bcab-763e76a9b29c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.012440] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 854.012440] env[62914]: value = "task-4831961" [ 854.012440] env[62914]: _type = "Task" [ 854.012440] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.022536] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831961, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.059935] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.060161] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.060341] env[62914]: DEBUG nova.network.neutron [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 854.060623] env[62914]: DEBUG nova.objects.instance [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'info_cache' on Instance uuid 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.266355] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 934a0ca3-d879-4b23-90fe-2c190c201a88] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 854.296328] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.296850] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 854.299995] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.708s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.300235] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.302331] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.195s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.304212] env[62914]: INFO nova.compute.claims [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.325396] env[62914]: DEBUG nova.network.neutron [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updated VIF entry in instance network info cache for port 705a7360-47b1-4951-92f7-277ca049efa1. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 854.325670] env[62914]: DEBUG nova.network.neutron [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [{"id": "705a7360-47b1-4951-92f7-277ca049efa1", "address": "fa:16:3e:be:78:78", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705a7360-47", "ovs_interfaceid": "705a7360-47b1-4951-92f7-277ca049efa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.336939] env[62914]: INFO nova.scheduler.client.report [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Deleted allocations for instance 1d74504f-b641-42c6-a420-c80614d69b23 [ 854.448629] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "397c5401-a435-4170-b07d-a03488c73867" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.448925] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.449196] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "397c5401-a435-4170-b07d-a03488c73867-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.449440] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock 
"397c5401-a435-4170-b07d-a03488c73867-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.449674] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.452462] env[62914]: INFO nova.compute.manager [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Terminating instance [ 854.454450] env[62914]: DEBUG nova.compute.manager [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 854.454676] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 854.455564] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60543ac2-8b1c-43b2-850f-2be2ffdfb3ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.461073] env[62914]: DEBUG nova.compute.manager [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Received event network-changed-705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 854.461268] env[62914]: DEBUG nova.compute.manager [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Refreshing instance network info cache due to event network-changed-705a7360-47b1-4951-92f7-277ca049efa1. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 854.461500] env[62914]: DEBUG oslo_concurrency.lockutils [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] Acquiring lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.467010] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 854.467274] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80022770-5b20-4a3d-bc3c-74083f4c615e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.473938] env[62914]: DEBUG oslo_vmware.api [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 854.473938] env[62914]: value = "task-4831962" [ 854.473938] env[62914]: _type = "Task" [ 854.473938] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.483259] env[62914]: DEBUG oslo_vmware.api [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.524619] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.564113] env[62914]: DEBUG nova.objects.base [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Object Instance<2f7bc586-af68-4d9d-81e2-8247371dfa7f> lazy-loaded attributes: flavor,info_cache {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 854.770682] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: bfdd7711-d081-42cf-9e4a-2df556d1b72e] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 854.810401] env[62914]: DEBUG nova.compute.utils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.815531] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 854.815966] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 854.829134] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6c47c06-cd11-4738-8a98-c2874851c75a req-92f27fc6-bc47-4668-adb1-24b448b4a02a service nova] Releasing lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.829991] env[62914]: DEBUG oslo_concurrency.lockutils [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] Acquired lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.830222] env[62914]: DEBUG nova.network.neutron [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Refreshing network info cache for port 705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 854.848907] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3d6a8449-0313-4833-98c7-2904697fbd3a tempest-ServerRescueTestJSONUnderV235-1448321267 tempest-ServerRescueTestJSONUnderV235-1448321267-project-member] Lock "1d74504f-b641-42c6-a420-c80614d69b23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.898s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.880655] env[62914]: DEBUG nova.policy [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3757ee859d1a4cebbcc504c8c92f6489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1873cee9895d48cb97914fd7ca8392a0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 854.985682] env[62914]: DEBUG oslo_vmware.api [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831962, 'name': PowerOffVM_Task, 'duration_secs': 0.45988} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.986074] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.986299] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.986609] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7587fd97-281d-4cca-9542-073639d3ca4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.023511] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831961, 'name': ReconfigVM_Task, 'duration_secs': 0.645856} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.023742] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228/4cea2bd1-a238-4fb6-bc47-719894461228.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 855.024609] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5a2272-4adf-4403-ac2e-f7999c7318b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.044017] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3121f7da-7aa1-4bee-9dfe-2af8534261f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.064284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdc5972-0a66-4afc-aade-5ca1c1a57056 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.085601] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec09a61c-b8b2-47b8-91ac-874377199d30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.094223] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 855.094549] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with 
opID=oslo.vmware-dfc50e6a-72b4-47f7-9ae5-71f20b0ef160 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.101590] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 855.101590] env[62914]: value = "task-4831964" [ 855.101590] env[62914]: _type = "Task" [ 855.101590] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.110265] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831964, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.158888] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Successfully created port: a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.274560] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1fa01184-1ed2-43de-bcbf-bd8658acc9f9] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 855.320153] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 855.567344] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 855.567619] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 855.567810] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Deleting the datastore file [datastore1] 397c5401-a435-4170-b07d-a03488c73867 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.572031] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-302dee19-94ad-4b3a-a3ed-39e3ad3db8f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.583138] env[62914]: DEBUG oslo_vmware.api [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 855.583138] env[62914]: value = "task-4831965" [ 855.583138] env[62914]: _type = "Task" [ 855.583138] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.602574] env[62914]: DEBUG oslo_vmware.api [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831965, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.615734] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831964, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.625791] env[62914]: DEBUG nova.network.neutron [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updated VIF entry in instance network info cache for port 705a7360-47b1-4951-92f7-277ca049efa1. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 855.626242] env[62914]: DEBUG nova.network.neutron [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [{"id": "705a7360-47b1-4951-92f7-277ca049efa1", "address": "fa:16:3e:be:78:78", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705a7360-47", "ovs_interfaceid": "705a7360-47b1-4951-92f7-277ca049efa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.778629] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: d8d08c36-bec2-4117-9352-8e148d25dc9e] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 855.856202] env[62914]: DEBUG nova.network.neutron [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [{"id": "1c22c510-e137-4ee3-8038-3b784a81e04f", "address": "fa:16:3e:31:c3:24", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c22c510-e1", "ovs_interfaceid": "1c22c510-e137-4ee3-8038-3b784a81e04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 855.889150] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfa83ee-9466-4972-b8c3-03d6c43bbe21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.897977] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d932a8-f67d-451c-9902-c7f94317f2f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.931460] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a03946-fd43-4d4e-aad9-ae13e76ee9d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.940583] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d944081d-586b-4397-b085-e01615d3420f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.956695] env[62914]: DEBUG nova.compute.provider_tree [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.094426] env[62914]: DEBUG oslo_vmware.api [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4831965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381671} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.094730] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.094926] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 856.095123] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 856.095306] env[62914]: INFO nova.compute.manager [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 856.095566] env[62914]: DEBUG oslo.service.loopingcall [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.095864] env[62914]: DEBUG nova.compute.manager [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 856.095960] env[62914]: DEBUG nova.network.neutron [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 856.112065] env[62914]: DEBUG oslo_vmware.api [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831964, 'name': PowerOnVM_Task, 'duration_secs': 0.94522} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.112385] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 856.129206] env[62914]: DEBUG oslo_concurrency.lockutils [req-3cb19bda-0a78-4f78-9aac-c8336f887f72 req-8b77fb42-fd2f-4970-9e8a-82daf7391b09 service nova] Releasing lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.283890] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 82aab17d-a6d0-48cf-a59a-fbef7d402894] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 856.337560] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 856.358984] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-2f7bc586-af68-4d9d-81e2-8247371dfa7f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.367631] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.367863] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.368028] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.368331] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.368441] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.369021] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.369021] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 856.369021] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.369295] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.369514] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.369697] env[62914]: DEBUG nova.virt.hardware [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.370652] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9178e1ab-044e-41ff-9564-5716b7111274 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.384987] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d654d462-2b61-42bb-ba95-c21a060758ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.460130] env[62914]: DEBUG nova.scheduler.client.report [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.787571] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1ddb6508-d8fb-4ead-bcb0-370c19bb287d] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 856.863995] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 856.863995] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22ea6ab8-176f-4e35-97eb-42fca9d74b4c {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.875039] env[62914]: DEBUG oslo_vmware.api [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 856.875039] env[62914]: value = "task-4831966" [ 856.875039] env[62914]: _type = "Task" [ 856.875039] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.882853] env[62914]: DEBUG oslo_vmware.api [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.893359] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Successfully updated port: a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.966043] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.966621] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 856.970018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.513s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.970249] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.972395] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.154s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.973876] env[62914]: INFO nova.compute.claims [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.007591] env[62914]: INFO nova.scheduler.client.report [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Deleted allocations for instance 6bdcd778-0942-41e7-a6fb-7c3413d34ef7 [ 857.128380] env[62914]: INFO nova.compute.manager [None req-a53b2d2f-1910-49c8-819b-3e099ce87293 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance to original state: 'active' [ 857.239885] env[62914]: DEBUG nova.network.neutron [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.293250] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 69a9cd15-7d6f-464d-b451-e193179088f7] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 857.359155] env[62914]: DEBUG nova.compute.manager [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 857.359693] env[62914]: DEBUG nova.compute.manager [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing instance network info cache due to event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 857.359972] env[62914]: DEBUG oslo_concurrency.lockutils [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] Acquiring lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.360189] env[62914]: DEBUG oslo_concurrency.lockutils [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] Acquired lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.360738] env[62914]: DEBUG nova.network.neutron [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 857.385621] env[62914]: DEBUG oslo_vmware.api [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831966, 'name': PowerOnVM_Task, 'duration_secs': 0.452439} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.385621] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 857.385831] env[62914]: DEBUG nova.compute.manager [None req-23da5f5c-2a90-4815-ae76-cc21213fcab5 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 857.386666] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e33cf76-2133-4bbe-8ac7-204c22cfeb3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.396917] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "refresh_cache-32e8f18e-2116-43bd-9951-ad809ab95ba2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.397087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "refresh_cache-32e8f18e-2116-43bd-9951-ad809ab95ba2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.397461] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Building network info cache for instance {{(pid=62914) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.423110] env[62914]: DEBUG nova.compute.manager [req-876e440e-63e3-423c-82b6-a1997ac01eb2 req-870f9cea-5937-4683-9082-cd9ac8fd27f1 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Received event network-vif-plugged-a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 857.424057] env[62914]: DEBUG oslo_concurrency.lockutils [req-876e440e-63e3-423c-82b6-a1997ac01eb2 req-870f9cea-5937-4683-9082-cd9ac8fd27f1 service nova] Acquiring lock "32e8f18e-2116-43bd-9951-ad809ab95ba2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.424504] env[62914]: DEBUG oslo_concurrency.lockutils [req-876e440e-63e3-423c-82b6-a1997ac01eb2 req-870f9cea-5937-4683-9082-cd9ac8fd27f1 service nova] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.425133] env[62914]: DEBUG oslo_concurrency.lockutils [req-876e440e-63e3-423c-82b6-a1997ac01eb2 req-870f9cea-5937-4683-9082-cd9ac8fd27f1 service nova] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.425563] env[62914]: DEBUG nova.compute.manager [req-876e440e-63e3-423c-82b6-a1997ac01eb2 req-870f9cea-5937-4683-9082-cd9ac8fd27f1 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] No waiting events found dispatching network-vif-plugged-a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 857.426199] env[62914]: WARNING nova.compute.manager [req-876e440e-63e3-423c-82b6-a1997ac01eb2 req-870f9cea-5937-4683-9082-cd9ac8fd27f1 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Received unexpected event network-vif-plugged-a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 for instance with vm_state building and task_state spawning. [ 857.486365] env[62914]: DEBUG nova.compute.utils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.490618] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 857.491013] env[62914]: DEBUG nova.network.neutron [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 857.522027] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd1b9022-c69f-41c1-9880-26bd3ad1b4c3 tempest-ServerRescueTestJSON-1768795781 tempest-ServerRescueTestJSON-1768795781-project-member] Lock "6bdcd778-0942-41e7-a6fb-7c3413d34ef7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.620s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.561185] env[62914]: DEBUG nova.policy [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 857.744138] env[62914]: INFO nova.compute.manager [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Took 1.65 seconds to deallocate network for instance. [ 857.804248] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 70a6d3e7-6928-47a7-9f7f-bd5dad64912f] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 857.966634] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.993737] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 858.253751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.275702] env[62914]: DEBUG nova.network.neutron [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Successfully created port: a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.305427] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: ff2cff97-1671-4f97-8f69-532253169ff8] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 858.384744] env[62914]: DEBUG nova.network.neutron [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updated VIF entry in instance network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 858.385052] env[62914]: DEBUG nova.network.neutron [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.455472] env[62914]: DEBUG nova.network.neutron [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Updating instance_info_cache with network_info: [{"id": "a0474c06-0c2f-4fe9-8636-c16aa5b3bca8", "address": "fa:16:3e:9a:bf:19", "network": {"id": 
"df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0474c06-0c", "ovs_interfaceid": "a0474c06-0c2f-4fe9-8636-c16aa5b3bca8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.638366] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e993a2d6-a420-4f18-83b2-c7d4daad9491 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.650088] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13466c82-7715-41ed-b0b5-511bebf427df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.687166] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f30e2b6-ee5b-49bc-9f37-c7c7cd304cc6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.696412] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b0c200-c38f-41d0-9ee3-c82b7cc1676a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.713342] env[62914]: DEBUG nova.compute.provider_tree [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.812198] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: ef521e82-38ab-4d62-b434-da7f7fa8c50f] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 858.888882] env[62914]: DEBUG oslo_concurrency.lockutils [req-c9ce3438-c93f-47b1-a8ce-9db81f2454c5 req-a6e8c518-1c8c-4548-a95b-c2dc5e88109a service nova] Releasing lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.957856] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "refresh_cache-32e8f18e-2116-43bd-9951-ad809ab95ba2" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.958224] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Instance network_info: |[{"id": "a0474c06-0c2f-4fe9-8636-c16aa5b3bca8", "address": "fa:16:3e:9a:bf:19", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0474c06-0c", "ovs_interfaceid": "a0474c06-0c2f-4fe9-8636-c16aa5b3bca8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 858.958769] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:bf:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0474c06-0c2f-4fe9-8636-c16aa5b3bca8', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.966523] env[62914]: DEBUG oslo.service.loopingcall [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.966778] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 858.967107] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b16d616e-ccf8-4631-ae48-ea220994ce78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.992995] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.992995] env[62914]: value = "task-4831967" [ 858.992995] env[62914]: _type = "Task" [ 858.992995] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.005182] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831967, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.017889] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 859.056361] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 859.056689] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 859.056850] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.057190] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 859.057386] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.057587] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 859.058096] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 859.058221] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 859.058368] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 859.058565] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 859.058782] env[62914]: DEBUG nova.virt.hardware [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 859.059760] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1260183c-eb4b-4de5-a195-586cc9259bcb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.071786] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "4cea2bd1-a238-4fb6-bc47-719894461228" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.072599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.073411] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.073411] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.073573] env[62914]: DEBUG 
oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.078788] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed10808c-8083-4d1d-963a-b3b0d5068983 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.085968] env[62914]: INFO nova.compute.manager [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Terminating instance [ 859.092794] env[62914]: DEBUG nova.compute.manager [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 859.093125] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 859.093946] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2923f616-680c-454c-afe6-c8d64ae6810c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.114857] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 859.115541] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e4ad433-ef5a-423e-89b4-b4d136abfc5a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.123252] env[62914]: DEBUG oslo_vmware.api [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 859.123252] env[62914]: value = "task-4831968" [ 859.123252] env[62914]: _type = "Task" [ 859.123252] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.136491] env[62914]: DEBUG oslo_vmware.api [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831968, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.218022] env[62914]: DEBUG nova.scheduler.client.report [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.316395] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 9ce44ae9-9369-4c0c-9d14-9c8fde42d612] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 859.504285] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831967, 'name': CreateVM_Task, 'duration_secs': 0.432936} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.504285] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 859.504785] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.504949] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.505329] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.505613] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-459a4108-84f0-472d-a7c7-b8b4af27b122 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.512530] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 859.512530] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f2fd89-9627-86a1-5943-49e9b81ab49e" [ 859.512530] env[62914]: _type = "Task" [ 859.512530] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.521910] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f2fd89-9627-86a1-5943-49e9b81ab49e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.636103] env[62914]: DEBUG oslo_vmware.api [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831968, 'name': PowerOffVM_Task, 'duration_secs': 0.300728} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.636583] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 859.637022] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 859.637288] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0189f601-70e0-4d5f-9d94-42dbb6555250 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.703869] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 859.704501] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 859.704501] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleting the datastore file [datastore2] 4cea2bd1-a238-4fb6-bc47-719894461228 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.704823] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc124330-e290-4d4a-a907-50163ca64255 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.712268] env[62914]: DEBUG oslo_vmware.api [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 859.712268] env[62914]: value = "task-4831970" [ 859.712268] env[62914]: _type = "Task" 
[ 859.712268] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.724409] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.724793] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 859.727726] env[62914]: DEBUG oslo_vmware.api [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.727894] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.588s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.728155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.730360] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.844s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.731879] env[62914]: INFO nova.compute.claims [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.776150] env[62914]: INFO nova.scheduler.client.report [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Deleted allocations for instance 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776 [ 859.821107] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 6a9c973f-8aea-4403-9fa2-d37e5eec1ee1] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 860.028862] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.029152] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.029369] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.029560] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.029736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.034574] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f2fd89-9627-86a1-5943-49e9b81ab49e, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.034887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.035133] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 860.035360] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.035513] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.035702] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 860.035976] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43a1026b-c418-4e07-b943-01a8663c6030 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.039055] env[62914]: INFO nova.compute.manager [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Terminating instance [ 860.041317] env[62914]: DEBUG nova.compute.manager [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 860.041515] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 860.043214] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b453c13a-13e3-4d2f-86b1-e29898cbd092 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.046799] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 860.046999] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 860.051087] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-557e11e5-61d2-4756-a766-cb54057f2224 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.052438] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 860.054473] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a99e1e7c-da91-4ab9-affa-e6349b028217 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.056114] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 860.056114] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525b7bb9-00fa-bc24-62e6-abce0d7c540c" [ 860.056114] env[62914]: _type = "Task" [ 860.056114] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.060577] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 860.060577] env[62914]: value = "task-4831971" [ 860.060577] env[62914]: _type = "Task" [ 860.060577] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.067041] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525b7bb9-00fa-bc24-62e6-abce0d7c540c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.072948] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831971, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.224731] env[62914]: DEBUG oslo_vmware.api [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4831970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167995} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.225144] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.225360] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 860.225556] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 860.225741] env[62914]: INFO nova.compute.manager [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Took 1.13 seconds to destroy the instance on the hypervisor. [ 860.226013] env[62914]: DEBUG oslo.service.loopingcall [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.226278] env[62914]: DEBUG nova.compute.manager [-] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 860.226492] env[62914]: DEBUG nova.network.neutron [-] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 860.238280] env[62914]: DEBUG nova.compute.utils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 860.244289] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 860.244289] env[62914]: DEBUG nova.network.neutron [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 860.290110] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3323ba41-b6f4-4ff7-a6f6-eee1a763a9ad tempest-ServerAddressesTestJSON-1238269399 tempest-ServerAddressesTestJSON-1238269399-project-member] Lock "45644d9d-1d7d-4c2c-825d-fb3a2f6f2776" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.537s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.325132] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 6fd5f3b8-1175-4bd5-b0b4-12517ba65271] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 860.333178] env[62914]: DEBUG nova.policy [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1305ed7d6c28421e93b3a8e31739df7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cda9ee54ad14f479838a54276dac349', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.341890] env[62914]: DEBUG nova.compute.manager [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] [instance: 397c5401-a435-4170-b07d-a03488c73867] Received event network-vif-deleted-705a7360-47b1-4951-92f7-277ca049efa1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 860.342152] env[62914]: DEBUG nova.compute.manager [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 
service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 860.342327] env[62914]: DEBUG nova.compute.manager [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing instance network info cache due to event network-changed-691c01fe-1d59-431c-9474-7726ec537a5b. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 860.342559] env[62914]: DEBUG oslo_concurrency.lockutils [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] Acquiring lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.342678] env[62914]: DEBUG oslo_concurrency.lockutils [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] Acquired lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.342845] env[62914]: DEBUG nova.network.neutron [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Refreshing network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 860.367486] env[62914]: DEBUG nova.compute.manager [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Received event network-changed-a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 860.367687] env[62914]: DEBUG nova.compute.manager [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Refreshing instance network info cache due to event network-changed-a0474c06-0c2f-4fe9-8636-c16aa5b3bca8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 860.367908] env[62914]: DEBUG oslo_concurrency.lockutils [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] Acquiring lock "refresh_cache-32e8f18e-2116-43bd-9951-ad809ab95ba2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.368076] env[62914]: DEBUG oslo_concurrency.lockutils [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] Acquired lock "refresh_cache-32e8f18e-2116-43bd-9951-ad809ab95ba2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.368325] env[62914]: DEBUG nova.network.neutron [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Refreshing network info cache for port a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 860.572726] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525b7bb9-00fa-bc24-62e6-abce0d7c540c, 'name': SearchDatastore_Task, 'duration_secs': 0.010577} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.577652] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831971, 'name': PowerOffVM_Task, 'duration_secs': 0.226097} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.577972] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11a688d2-8d5a-47b4-926a-615feb3f421f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.584779] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 860.584987] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 860.585603] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b871d8f9-e26e-489c-b04c-d4ba430ae4da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.595022] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 860.595022] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527d0a8d-acc2-3e5b-a434-73029dd16ccf" [ 860.595022] env[62914]: _type = "Task" [ 860.595022] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.609597] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527d0a8d-acc2-3e5b-a434-73029dd16ccf, 'name': SearchDatastore_Task, 'duration_secs': 0.011278} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.610360] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.610742] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 32e8f18e-2116-43bd-9951-ad809ab95ba2/32e8f18e-2116-43bd-9951-ad809ab95ba2.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 860.611107] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a154eb2f-5cf8-4242-9686-a9015aeb0ff3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.622051] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 860.622051] env[62914]: value = "task-4831973" [ 860.622051] env[62914]: _type = "Task" [ 860.622051] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.634077] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831973, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.657368] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 860.657849] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 860.658370] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleting the datastore file [datastore2] 2f7bc586-af68-4d9d-81e2-8247371dfa7f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.658370] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bd69b6e-b010-49b4-b308-eb5a4e5def68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.668070] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 860.668070] env[62914]: value = "task-4831974" [ 860.668070] env[62914]: _type = "Task" [ 860.668070] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.678495] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.742403] env[62914]: DEBUG nova.network.neutron [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Successfully updated port: a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.744240] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 860.829872] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: ea214cc0-0f7a-4aee-9906-8d47e660c8f7] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 860.943279] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "397c5401-a435-4170-b07d-a03488c73867" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.997762] env[62914]: DEBUG nova.network.neutron [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Successfully created port: aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.138275] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831973, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.174123] env[62914]: DEBUG nova.network.neutron [-] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.186545] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831974, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.254078] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-982936be-3cb1-4930-b135-8fc2019c5216" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.254401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-982936be-3cb1-4930-b135-8fc2019c5216" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.254401] env[62914]: DEBUG nova.network.neutron [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 861.313503] env[62914]: DEBUG nova.network.neutron [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updated VIF entry in instance network info cache for port 691c01fe-1d59-431c-9474-7726ec537a5b. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 861.313908] env[62914]: DEBUG nova.network.neutron [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [{"id": "691c01fe-1d59-431c-9474-7726ec537a5b", "address": "fa:16:3e:9a:2c:53", "network": {"id": "5a4f9408-b20e-4901-8403-9f73014f03f8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1059492920-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ba9ece80a24353ac072b643cb16df7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691c01fe-1d", "ovs_interfaceid": "691c01fe-1d59-431c-9474-7726ec537a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.317403] env[62914]: DEBUG nova.network.neutron [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Updated VIF entry in instance network info cache for port a0474c06-0c2f-4fe9-8636-c16aa5b3bca8. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 861.317758] env[62914]: DEBUG nova.network.neutron [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Updating instance_info_cache with network_info: [{"id": "a0474c06-0c2f-4fe9-8636-c16aa5b3bca8", "address": "fa:16:3e:9a:bf:19", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0474c06-0c", "ovs_interfaceid": "a0474c06-0c2f-4fe9-8636-c16aa5b3bca8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.333418] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e69c36e9-3c59-48e3-9962-ffe8de10a789] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 861.442577] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eddc64b-8654-404f-883a-9379f703bf90 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.453412] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1ef153-22d4-45c7-8010-efa7a8a3ad52 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.492390] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b395e327-7921-44f9-8209-29de4beec571 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.503837] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c53dc91-6a8d-42f0-a8aa-2a0f65805a6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.521792] env[62914]: DEBUG nova.compute.provider_tree [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.638766] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 
tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604205} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.639240] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 32e8f18e-2116-43bd-9951-ad809ab95ba2/32e8f18e-2116-43bd-9951-ad809ab95ba2.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 861.639431] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 861.639671] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af8e0744-c0fc-4c58-87c4-25513eb70b04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.647956] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 861.647956] env[62914]: value = "task-4831975" [ 861.647956] env[62914]: _type = "Task" [ 861.647956] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.661788] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.676843] env[62914]: INFO nova.compute.manager [-] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Took 1.45 seconds to deallocate network for instance. [ 861.684934] env[62914]: DEBUG oslo_vmware.api [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4831974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.540461} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.688760] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.689010] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 861.689318] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 861.689629] env[62914]: INFO nova.compute.manager [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 1.65 seconds to destroy the instance on the hypervisor. [ 861.690154] env[62914]: DEBUG oslo.service.loopingcall [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.690480] env[62914]: DEBUG nova.compute.manager [-] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 861.690622] env[62914]: DEBUG nova.network.neutron [-] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 861.764637] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 861.799498] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 861.799740] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 861.799901] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.800156] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 861.800311] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.800496] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 861.800717] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 861.800877] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 861.801051] env[62914]: DEBUG nova.virt.hardware [None 
req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 861.801215] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 861.801385] env[62914]: DEBUG nova.virt.hardware [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.802280] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b16166-ac29-4506-9677-7aa8999cb81c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.811103] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac044a8a-b0f2-47fd-b981-d28c6952867c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.816475] env[62914]: DEBUG oslo_concurrency.lockutils [req-c3de3795-1634-42ad-bc21-4af6ca6474de req-902bec12-52a5-4663-a84b-58fc35b7c144 service nova] Releasing lock "refresh_cache-7d8287f9-10be-4834-8b7a-1b764145d1c3" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.827831] env[62914]: DEBUG oslo_concurrency.lockutils [req-cf1d6f2e-f928-4a7c-8353-8408364d9292 req-e0e87136-c79c-4b90-9ad1-be85de3457a6 service nova] Releasing lock "refresh_cache-32e8f18e-2116-43bd-9951-ad809ab95ba2" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.835638] env[62914]: DEBUG nova.network.neutron [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 861.841460] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 2927d5a5-ae7f-4c3c-a931-4dca9cedfbeb] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 861.909591] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.910370] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.910778] env[62914]: INFO nova.compute.manager [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Shelving [ 862.027181] env[62914]: DEBUG nova.scheduler.client.report [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.159581] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076955} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.160394] env[62914]: DEBUG nova.network.neutron [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Updating instance_info_cache with network_info: [{"id": "a6bab462-94fa-4095-9dbf-83474e5057dd", "address": "fa:16:3e:c9:e6:cc", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bab462-94", "ovs_interfaceid": "a6bab462-94fa-4095-9dbf-83474e5057dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.162513] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 862.163773] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3634b0ad-79c3-4903-a336-787af3ce6837 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.196526] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 32e8f18e-2116-43bd-9951-ad809ab95ba2/32e8f18e-2116-43bd-9951-ad809ab95ba2.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.199024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.199024] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00fa0bc8-a644-4d5b-b944-bb2565b29476 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.221511] env[62914]: DEBUG oslo_vmware.api [None 
req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 862.221511] env[62914]: value = "task-4831976" [ 862.221511] env[62914]: _type = "Task" [ 862.221511] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.234360] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831976, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.345136] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e1018767-71e4-49c9-bd4d-02eae39dc26b] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 862.419990] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 862.420084] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efc68649-4d03-4a63-a21b-97dea54787f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.429014] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 862.429014] env[62914]: value = "task-4831977" [ 862.429014] env[62914]: _type = "Task" [ 862.429014] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.439693] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831977, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.485015] env[62914]: DEBUG nova.compute.manager [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Received event network-vif-plugged-a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 862.485552] env[62914]: DEBUG oslo_concurrency.lockutils [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] Acquiring lock "982936be-3cb1-4930-b135-8fc2019c5216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.485850] env[62914]: DEBUG oslo_concurrency.lockutils [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] Lock "982936be-3cb1-4930-b135-8fc2019c5216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.486100] env[62914]: DEBUG oslo_concurrency.lockutils [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] Lock "982936be-3cb1-4930-b135-8fc2019c5216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.486328] env[62914]: DEBUG nova.compute.manager [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] No waiting events found dispatching network-vif-plugged-a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 862.486564] env[62914]: WARNING nova.compute.manager [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Received unexpected event network-vif-plugged-a6bab462-94fa-4095-9dbf-83474e5057dd for instance with vm_state building and task_state spawning. [ 862.486807] env[62914]: DEBUG nova.compute.manager [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Received event network-vif-deleted-c148a862-a6a8-4c52-b1df-8e764ee00e94 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 862.487161] env[62914]: DEBUG nova.compute.manager [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Received event network-changed-a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 862.487396] env[62914]: DEBUG nova.compute.manager [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Refreshing instance network info cache due to event network-changed-a6bab462-94fa-4095-9dbf-83474e5057dd. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 862.487825] env[62914]: DEBUG oslo_concurrency.lockutils [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] Acquiring lock "refresh_cache-982936be-3cb1-4930-b135-8fc2019c5216" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.534162] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.804s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.534833] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 862.539430] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.145s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.541872] env[62914]: INFO nova.compute.claims [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.665502] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-982936be-3cb1-4930-b135-8fc2019c5216" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.665823] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Instance network_info: |[{"id": "a6bab462-94fa-4095-9dbf-83474e5057dd", "address": "fa:16:3e:c9:e6:cc", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapa6bab462-94", "ovs_interfaceid": "a6bab462-94fa-4095-9dbf-83474e5057dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 862.667412] env[62914]: DEBUG oslo_concurrency.lockutils [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] Acquired lock "refresh_cache-982936be-3cb1-4930-b135-8fc2019c5216" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.667669] env[62914]: DEBUG nova.network.neutron [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Refreshing network info cache for port a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 862.669213] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:e6:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6bab462-94fa-4095-9dbf-83474e5057dd', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.683024] env[62914]: DEBUG oslo.service.loopingcall [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.689519] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 862.690853] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76c48683-f8c4-49fc-b429-568e0eb0566f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.713058] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 862.713058] env[62914]: value = "task-4831978" [ 862.713058] env[62914]: _type = "Task" [ 862.713058] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.729241] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831978, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.735804] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831976, 'name': ReconfigVM_Task, 'duration_secs': 0.360823} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.736287] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 32e8f18e-2116-43bd-9951-ad809ab95ba2/32e8f18e-2116-43bd-9951-ad809ab95ba2.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.737109] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8c854e5-3670-4804-80cb-27a2228f3fb0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.750072] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 862.750072] env[62914]: value = "task-4831979" [ 862.750072] env[62914]: _type = "Task" [ 862.750072] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.760166] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831979, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.849911] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: a40c2ecb-918d-44ca-ad4c-d0d3fc4e8494] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 862.939091] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831977, 'name': PowerOffVM_Task, 'duration_secs': 0.318173} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.939444] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 862.940325] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5978b2-2a9e-414c-973a-80643dd1a421 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.962671] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91777e6-f793-497d-b903-71f67653c79c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.975176] env[62914]: DEBUG nova.compute.manager [req-345f1c21-83b3-4a94-af31-0cef2bbe6ccb req-faad3445-08e1-488a-9db8-0afdd3099ec2 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Received event network-vif-plugged-aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 862.975369] env[62914]: DEBUG oslo_concurrency.lockutils [req-345f1c21-83b3-4a94-af31-0cef2bbe6ccb req-faad3445-08e1-488a-9db8-0afdd3099ec2 service nova] Acquiring lock "c488ba7b-68cc-4876-934f-a11d33fca6ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.975592] env[62914]: DEBUG oslo_concurrency.lockutils [req-345f1c21-83b3-4a94-af31-0cef2bbe6ccb req-faad3445-08e1-488a-9db8-0afdd3099ec2 service nova] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.975779] env[62914]: DEBUG oslo_concurrency.lockutils [req-345f1c21-83b3-4a94-af31-0cef2bbe6ccb req-faad3445-08e1-488a-9db8-0afdd3099ec2 service nova] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.975951] env[62914]: DEBUG nova.compute.manager [req-345f1c21-83b3-4a94-af31-0cef2bbe6ccb req-faad3445-08e1-488a-9db8-0afdd3099ec2 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] No waiting events found dispatching network-vif-plugged-aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 862.976134] env[62914]: WARNING nova.compute.manager [req-345f1c21-83b3-4a94-af31-0cef2bbe6ccb req-faad3445-08e1-488a-9db8-0afdd3099ec2 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Received unexpected event network-vif-plugged-aac4511b-c3e4-44ce-8c77-1400d0526d22 for instance with vm_state building and task_state spawning. 
[ 863.048824] env[62914]: DEBUG nova.compute.utils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.050553] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 863.053723] env[62914]: DEBUG nova.network.neutron [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 863.144877] env[62914]: DEBUG nova.network.neutron [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Updated VIF entry in instance network info cache for port a6bab462-94fa-4095-9dbf-83474e5057dd. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 863.145279] env[62914]: DEBUG nova.network.neutron [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Updating instance_info_cache with network_info: [{"id": "a6bab462-94fa-4095-9dbf-83474e5057dd", "address": "fa:16:3e:c9:e6:cc", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bab462-94", "ovs_interfaceid": "a6bab462-94fa-4095-9dbf-83474e5057dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.164733] env[62914]: DEBUG nova.policy [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbbfc60f75264022a6f9cd5a27d5f65f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4d16d60c1fb4848a299817c62263bf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.180094] env[62914]: DEBUG nova.network.neutron [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Successfully updated port: aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.182200] env[62914]: DEBUG nova.network.neutron [-] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.226963] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831978, 'name': CreateVM_Task, 'duration_secs': 0.374319} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.227171] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 863.228714] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.228879] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.229850] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 863.229850] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f11154a-8fdf-43da-b5a7-50ee995f3e3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.236326] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 863.236326] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f855d7-048d-639a-8c73-88c970126323" [ 863.236326] env[62914]: _type = "Task" [ 863.236326] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.247915] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f855d7-048d-639a-8c73-88c970126323, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.260599] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831979, 'name': Rename_Task, 'duration_secs': 0.168175} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.261228] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 863.261648] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c22b84cd-2d76-4a24-932f-889bbdf53001 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.272236] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 863.272236] env[62914]: value = "task-4831980" [ 863.272236] env[62914]: _type = "Task" [ 863.272236] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.280245] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831980, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.356528] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 9e39cfb8-e277-4798-92b0-b54f310ef2f4] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 863.478764] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 863.479433] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cf24dbd9-d992-4bae-a697-77bedaa0f69f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.491826] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 863.491826] env[62914]: value = "task-4831981" [ 863.491826] env[62914]: _type = "Task" [ 863.491826] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.502947] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831981, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.556378] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 863.652155] env[62914]: DEBUG oslo_concurrency.lockutils [req-91940db7-646b-4c38-8e13-53c2f4358486 req-4aad2b90-29e9-48ac-91c9-cf7067e4d087 service nova] Releasing lock "refresh_cache-982936be-3cb1-4930-b135-8fc2019c5216" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.685088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.685088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.685088] env[62914]: DEBUG nova.network.neutron [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.687256] env[62914]: INFO nova.compute.manager [-] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Took 2.00 seconds to deallocate network for instance. [ 863.751343] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f855d7-048d-639a-8c73-88c970126323, 'name': SearchDatastore_Task, 'duration_secs': 0.023486} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.751822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.752140] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.752507] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.752584] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.752778] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.753107] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b274c95-c5eb-4092-9be9-c2f734bb1eb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.765144] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.765387] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore2] devstack-image-cache_base created. 
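The lock names in the entries above ("[datastore2] devstack-image-cache_base/75c43660-.../...vmdk") show how work on a cached base image is serialized: a named oslo.concurrency lock is held while the cache entry is checked and, if missing, populated, so concurrent spawns from the same image only fetch it once. A hedged sketch of that locking pattern; the lock string format is copied from the log, while `image_in_cache` and `fetch_image_to_cache` are hypothetical callables standing in for the real checks:

    from oslo_concurrency import lockutils

    def ensure_cached_image(datastore, image_id, image_in_cache, fetch_image_to_cache):
        # Lock name mirrors the entries above, e.g.
        # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
        cache_vmdk = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
            datastore, image_id, image_id)
        # Same critical section as the Acquiring/Acquired/Releasing lock entries.
        with lockutils.lock(cache_vmdk):
            if not image_in_cache(cache_vmdk):
                fetch_image_to_cache(cache_vmdk)
        return cache_vmdk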
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 863.766434] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a18c4183-a1d9-4494-9fcd-13afa7712569 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.779377] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 863.779377] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b36daa-6304-f79b-025b-b95dd0c08b1b" [ 863.779377] env[62914]: _type = "Task" [ 863.779377] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.788308] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831980, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.798759] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b36daa-6304-f79b-025b-b95dd0c08b1b, 'name': SearchDatastore_Task, 'duration_secs': 0.014954} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.800017] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e652c194-e7bf-494b-b449-69f27d1f64d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.808253] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 863.808253] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a9a34b-7dc5-cd22-4495-17bce74f9f27" [ 863.808253] env[62914]: _type = "Task" [ 863.808253] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.824355] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a9a34b-7dc5-cd22-4495-17bce74f9f27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.863164] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: db31a794-3928-41bb-afd8-14fae9357654] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 863.968902] env[62914]: DEBUG nova.network.neutron [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Successfully created port: 38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.006833] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831981, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.196049] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.218048] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b6cd1f-5699-426e-a835-c747fff556d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.229721] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8077a363-5760-459f-90e2-9643ddd6713f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.273597] env[62914]: DEBUG nova.network.neutron [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.278919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f71ef7-8242-4643-8e9e-7068a446a0d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.288286] env[62914]: DEBUG oslo_vmware.api [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831980, 'name': PowerOnVM_Task, 'duration_secs': 0.529696} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.290746] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 864.291187] env[62914]: INFO nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Took 7.95 seconds to spawn the instance on the hypervisor. [ 864.291187] env[62914]: DEBUG nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 864.292166] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea68b58-e62e-4ba2-9ee2-5e1b8cce3b76 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.295848] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49adcbd5-97e6-4d37-95b9-c0b39b13a16a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.315024] env[62914]: DEBUG nova.compute.provider_tree [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.329464] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a9a34b-7dc5-cd22-4495-17bce74f9f27, 'name': SearchDatastore_Task, 'duration_secs': 0.016276} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.329464] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.329464] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 982936be-3cb1-4930-b135-8fc2019c5216/982936be-3cb1-4930-b135-8fc2019c5216.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 864.329464] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f09b5e2-88e3-4725-b3d7-1df1a76a0a5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.339240] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 864.339240] env[62914]: value = "task-4831982" [ 864.339240] env[62914]: _type = "Task" [ 864.339240] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.350234] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.366826] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 61e36e7b-aaa1-420e-bd43-f0184b56581b] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 864.507135] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831981, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.572398] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Start spawning the instance on the hypervisor. 
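The CopyVirtualDisk_Task entry above copies the cached base image to the instance's own VMDK before the disk is extended and attached. A sketch of how such a copy can be issued through the VirtualDiskManager, assuming `session` (an oslo_vmware API session), a datacenter reference `dc_ref`, and datastore paths like those in the log; illustrative only, not the driver's exact code:

    def copy_cached_disk(session, dc_ref, source_vmdk, dest_vmdk):
        # e.g. source_vmdk = '[datastore2] devstack-image-cache_base/<id>/<id>.vmdk'
        #      dest_vmdk   = '[datastore2] <instance-uuid>/<instance-uuid>.vmdk'
        vdm = session.vim.service_content.virtualDiskManager
        task_ref = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=source_vmdk, sourceDatacenter=dc_ref,
            destName=dest_vmdk, destDatacenter=dc_ref)
        # Polled exactly like the CopyVirtualDisk_Task entries above.
        session.wait_for_task(task_ref)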
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 864.574246] env[62914]: DEBUG nova.network.neutron [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [{"id": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "address": "fa:16:3e:ad:a6:42", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac4511b-c3", "ovs_interfaceid": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.620087] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.621139] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.621139] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.621716] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 
tempest-ListImageFiltersTestJSON-1708948366-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.622075] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.622420] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.623143] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.624049] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.624254] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.624515] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.624754] env[62914]: DEBUG nova.virt.hardware [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.626699] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699b79b5-c88f-40cd-ae42-992939fc730a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.641261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ea29ba-5ec8-41e1-a42a-f53bc9ff357d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.692780] env[62914]: DEBUG nova.compute.manager [req-e888130b-ccc5-418a-905c-72241e2b5324 req-4fc72b60-cc93-405a-9b02-ed415de6ac27 service nova] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Received event network-vif-deleted-1c22c510-e137-4ee3-8038-3b784a81e04f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 864.820708] 
env[62914]: DEBUG nova.scheduler.client.report [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.831662] env[62914]: INFO nova.compute.manager [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Took 57.13 seconds to build instance. [ 864.857371] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831982, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.873213] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: a4fca617-da38-4913-b2c8-a2921da6db56] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 865.007098] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831981, 'name': CreateSnapshot_Task, 'duration_secs': 1.15791} completed successfully. 
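The inventory record above is what the report client compares against Placement; the usable capacity of each resource class follows from the reported fields as (total - reserved) * allocation_ratio. A small standalone check against the numbers in the log:

    # Capacity per resource class: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 200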
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.007420] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 865.008361] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513ac5c0-7150-443c-8bf1-8feb923284c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.077921] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.078292] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Instance network_info: |[{"id": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "address": "fa:16:3e:ad:a6:42", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac4511b-c3", "ovs_interfaceid": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 865.079040] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:a6:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bf86b133-2b7b-4cab-8f6f-5a0856d34c7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aac4511b-c3e4-44ce-8c77-1400d0526d22', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.086647] env[62914]: DEBUG oslo.service.loopingcall [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.087739] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 865.087739] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d017270-b84e-4970-af51-44e11ab7cf0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.108138] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.108138] env[62914]: value = "task-4831983" [ 865.108138] env[62914]: _type = "Task" [ 865.108138] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.117057] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831983, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.132600] env[62914]: DEBUG nova.compute.manager [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Received event network-changed-aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 865.132823] env[62914]: DEBUG nova.compute.manager [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Refreshing instance network info cache due to event network-changed-aac4511b-c3e4-44ce-8c77-1400d0526d22. 
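The "Received event network-changed" entries above show the external-event path: before the network info cache for the affected instance is rebuilt, the handler takes the per-instance "refresh_cache-<uuid>" lock, the same lock name seen being acquired and released elsewhere in this log. A hedged sketch of that locking pattern; `refresh_network_info_cache` is a hypothetical placeholder for the actual Neutron refresh:

    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, port_id, refresh_network_info_cache):
        # Only one thread rebuilds a given instance's network info cache at a
        # time, mirroring the "refresh_cache-<uuid>" lock entries above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            refresh_network_info_cache(instance_uuid, port_id)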
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 865.133252] env[62914]: DEBUG oslo_concurrency.lockutils [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] Acquiring lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.133450] env[62914]: DEBUG oslo_concurrency.lockutils [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] Acquired lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.133628] env[62914]: DEBUG nova.network.neutron [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Refreshing network info cache for port aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 865.331489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.792s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.332085] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 865.335212] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.432s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.335444] env[62914]: DEBUG nova.objects.instance [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lazy-loading 'resources' on Instance uuid 1ec89a28-d4f3-4324-bf14-c99c5ce05950 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.336799] env[62914]: DEBUG oslo_concurrency.lockutils [None req-43494bd3-f31a-4477-9e1c-2f54fac48903 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.276s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.356020] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658544} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.356020] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 982936be-3cb1-4930-b135-8fc2019c5216/982936be-3cb1-4930-b135-8fc2019c5216.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 865.356020] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.356020] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b10e5cfb-e8e7-415d-8e71-1876bf55b02c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.365864] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 865.365864] env[62914]: value = "task-4831984" [ 865.365864] env[62914]: _type = "Task" [ 865.365864] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.376811] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 52097338-887e-4c79-8413-abfd7ea26c96] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 865.378739] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831984, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.531852] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 865.532372] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.532617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.532836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.533036] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.533228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.534959] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-05a0a60d-ec50-4d66-90ea-06ca04963e6f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.538660] env[62914]: INFO nova.compute.manager [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Terminating instance [ 865.540727] env[62914]: DEBUG nova.compute.manager [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 
bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 865.540931] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 865.542750] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2682d05-5111-4e8f-8354-c253e2f6fc7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.547957] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 865.547957] env[62914]: value = "task-4831985" [ 865.547957] env[62914]: _type = "Task" [ 865.547957] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.554163] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 865.555017] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9021e9da-5337-4bed-916e-f1fc67439204 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.560366] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.565788] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 865.565788] env[62914]: value = "task-4831986" [ 865.565788] env[62914]: _type = "Task" [ 865.565788] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.576636] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.619511] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831983, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.630942] env[62914]: DEBUG nova.network.neutron [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Successfully updated port: 38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.696900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "32e8f18e-2116-43bd-9951-ad809ab95ba2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.697214] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.697450] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "32e8f18e-2116-43bd-9951-ad809ab95ba2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.697648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.697825] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.701404] env[62914]: INFO nova.compute.manager [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Terminating instance [ 865.703689] env[62914]: DEBUG nova.compute.manager [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 865.703896] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 865.704755] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a179a2-baba-4f9e-a6df-966a93b6c430 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.713230] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 865.713848] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82fd099c-253d-478f-b860-9066bfbca8bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.721570] env[62914]: DEBUG oslo_vmware.api [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 865.721570] env[62914]: value = "task-4831987" [ 865.721570] env[62914]: _type = "Task" [ 865.721570] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.733619] env[62914]: DEBUG oslo_vmware.api [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831987, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.841702] env[62914]: DEBUG nova.compute.utils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 865.842798] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 865.849357] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 865.849725] env[62914]: DEBUG nova.network.neutron [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 865.882612] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229203} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.885673] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.886236] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3eff61b1-b09c-4a04-821c-cefdc7be3f64] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 865.889809] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28adcd8f-ebaa-4c2e-b8cc-7308a81e0f3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.926554] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 982936be-3cb1-4930-b135-8fc2019c5216/982936be-3cb1-4930-b135-8fc2019c5216.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.929429] env[62914]: DEBUG nova.network.neutron [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updated VIF entry in instance network info cache for port aac4511b-c3e4-44ce-8c77-1400d0526d22. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 865.929907] env[62914]: DEBUG nova.network.neutron [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [{"id": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "address": "fa:16:3e:ad:a6:42", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac4511b-c3", "ovs_interfaceid": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.936197] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9df06474-9034-46c0-8bdf-dce98c61e6cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.954113] env[62914]: DEBUG oslo_concurrency.lockutils [req-d5943d12-3949-4230-b759-0eed028001ff req-af211cec-ac3a-4eff-be97-f17525f71705 service nova] Releasing lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.960725] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 865.960725] env[62914]: value = "task-4831988" [ 865.960725] env[62914]: _type = "Task" [ 865.960725] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.968849] env[62914]: DEBUG nova.policy [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbbfc60f75264022a6f9cd5a27d5f65f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4d16d60c1fb4848a299817c62263bf1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 865.978316] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831988, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.061488] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.079091] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.121577] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831983, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.134118] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "refresh_cache-12e8b0ac-0dec-4928-ae65-ab53992ecab5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.134277] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired lock "refresh_cache-12e8b0ac-0dec-4928-ae65-ab53992ecab5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.134432] env[62914]: DEBUG nova.network.neutron [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.239205] env[62914]: DEBUG oslo_vmware.api [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831987, 'name': PowerOffVM_Task, 'duration_secs': 0.220272} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.239591] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 866.239804] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 866.240138] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7050113-6cf5-4af8-aac4-3c960ce659cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.329068] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 866.329396] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 866.329617] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 
tempest-MultipleCreateTestJSON-1768884473-project-member] Deleting the datastore file [datastore2] 32e8f18e-2116-43bd-9951-ad809ab95ba2 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.329935] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fd3884d-b086-4e4c-9ecf-c03aee7a7677 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.339691] env[62914]: DEBUG oslo_vmware.api [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 866.339691] env[62914]: value = "task-4831990" [ 866.339691] env[62914]: _type = "Task" [ 866.339691] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.359286] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 866.363229] env[62914]: DEBUG oslo_vmware.api [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.382424] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.400052] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 43edad1f-cff0-4d3c-a721-98277d1cddc2] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 866.476237] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831988, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.493093] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef89df4-ecf8-46c4-b3bd-0fad4e7166f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.503739] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c84b2d6-f666-4cfa-8431-ac383ad5da5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.548055] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f61058-2b45-486e-8c30-ca08cac83023 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.563528] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb96ebe1-8f7e-4c09-9410-1b881e936632 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.568188] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.583602] env[62914]: DEBUG nova.compute.provider_tree [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.591361] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831986, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.592476] env[62914]: DEBUG nova.network.neutron [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Successfully created port: cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.624065] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831983, 'name': CreateVM_Task, 'duration_secs': 1.402007} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.624296] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 866.625900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.626137] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.626497] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.627057] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e955176-129f-4908-9513-b0de767f15cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.634686] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 866.634686] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e0bba-4597-ac14-44dd-49bf2716ea49" [ 866.634686] env[62914]: _type = "Task" [ 866.634686] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.651776] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e0bba-4597-ac14-44dd-49bf2716ea49, 'name': SearchDatastore_Task, 'duration_secs': 0.012569} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.652190] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.652455] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.652744] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.652886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.653487] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.654629] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6e44777-ae7a-42d1-9e77-2ed4f6fe955d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.667889] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.667889] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 866.667889] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25d2cd85-db8f-4b9f-9167-7f3c272c699c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.676122] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 866.676122] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a5278-0f3b-789c-e845-9b64b2ea1ab2" [ 866.676122] env[62914]: _type = "Task" [ 866.676122] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.687827] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a5278-0f3b-789c-e845-9b64b2ea1ab2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.689325] env[62914]: DEBUG nova.network.neutron [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 866.757852] env[62914]: DEBUG nova.compute.manager [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Received event network-vif-plugged-38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 866.759858] env[62914]: DEBUG oslo_concurrency.lockutils [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] Acquiring lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.759858] env[62914]: DEBUG oslo_concurrency.lockutils [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.759858] env[62914]: DEBUG oslo_concurrency.lockutils [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.759858] env[62914]: DEBUG nova.compute.manager [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] No waiting events found dispatching 
network-vif-plugged-38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.759858] env[62914]: WARNING nova.compute.manager [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Received unexpected event network-vif-plugged-38e304a6-f447-44af-8c07-955d9d6a842f for instance with vm_state building and task_state spawning. [ 866.759858] env[62914]: DEBUG nova.compute.manager [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Received event network-changed-38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 866.759858] env[62914]: DEBUG nova.compute.manager [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Refreshing instance network info cache due to event network-changed-38e304a6-f447-44af-8c07-955d9d6a842f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 866.759858] env[62914]: DEBUG oslo_concurrency.lockutils [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] Acquiring lock "refresh_cache-12e8b0ac-0dec-4928-ae65-ab53992ecab5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.850225] env[62914]: DEBUG oslo_vmware.api [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18778} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.850497] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 866.850609] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 866.850758] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 866.850933] env[62914]: INFO nova.compute.manager [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Took 1.15 seconds to destroy the instance on the hypervisor. 
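[editor's note] The `Waiting for the task` / `progress is N%` / `completed successfully` triplets above are oslo.vmware's task-polling loop (`wait_for_task` in oslo_vmware/api.py, driven by `_poll_task`), and the `PropertyCollector.RetrievePropertiesEx` invocations are its property retrievals. A minimal sketch of driving that same loop directly is below; the vCenter host and credentials are placeholders, and the power-off call is only an example of a `*_Task` method whose returned task reference gets polled — it is not the exact call Nova makes here.

```python
from oslo_vmware import api, vim_util

# Hypothetical endpoint and credentials -- substitute real values.
session = api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# The PropertyCollector.RetrievePropertiesEx entries in the log correspond
# to property retrievals like this one (first 100 VMs, two properties each).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100,
                            ['name', 'runtime.powerState'])

if result and result.objects:
    vm_ref = result.objects[0].obj
    # Any vSphere "*_Task" method returns a task moref; wait_for_task()
    # polls it (the "progress is N%" lines) until it completes or fails.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

session.logout()
```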
[ 866.851190] env[62914]: DEBUG oslo.service.loopingcall [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.851440] env[62914]: DEBUG nova.compute.manager [-] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 866.851521] env[62914]: DEBUG nova.network.neutron [-] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 866.877525] env[62914]: DEBUG nova.network.neutron [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Updating instance_info_cache with network_info: [{"id": "38e304a6-f447-44af-8c07-955d9d6a842f", "address": "fa:16:3e:46:fb:cb", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38e304a6-f4", "ovs_interfaceid": "38e304a6-f447-44af-8c07-955d9d6a842f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.906176] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.906344] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Cleaning up deleted instances with incomplete migration {{(pid=62914) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11342}} [ 866.973709] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831988, 'name': ReconfigVM_Task, 'duration_secs': 0.763656} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.974162] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 982936be-3cb1-4930-b135-8fc2019c5216/982936be-3cb1-4930-b135-8fc2019c5216.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.974895] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf5f5efd-0042-470e-916b-a7792210156e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.983582] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 866.983582] env[62914]: value = "task-4831991" [ 866.983582] env[62914]: _type = "Task" [ 866.983582] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.993843] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831991, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.063675] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.082086] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831986, 'name': PowerOffVM_Task, 'duration_secs': 1.090861} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.082558] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 867.082863] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 867.083262] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40f70f9a-3716-4f2a-b296-0a6fdad0edff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.087412] env[62914]: DEBUG nova.scheduler.client.report [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 867.157467] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 867.157695] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 867.157908] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleting the datastore file [datastore2] bd81fcb7-abef-4b86-8dce-f07b1c226f2f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.158301] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28c3e705-b673-4ec8-9de8-cb7d83594d4c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.170038] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 867.170038] env[62914]: value = "task-4831993" [ 867.170038] env[62914]: _type = "Task" [ 867.170038] 
env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.193312] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831993, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.197128] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a5278-0f3b-789c-e845-9b64b2ea1ab2, 'name': SearchDatastore_Task, 'duration_secs': 0.013689} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.198388] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65ef69c9-155a-42a6-9fe6-161e23335317 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.204902] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 867.204902] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ce9299-3290-b997-9142-d5d8f8c8c3af" [ 867.204902] env[62914]: _type = "Task" [ 867.204902] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.215720] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ce9299-3290-b997-9142-d5d8f8c8c3af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.378666] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 867.381041] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Releasing lock "refresh_cache-12e8b0ac-0dec-4928-ae65-ab53992ecab5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.381364] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Instance network_info: |[{"id": "38e304a6-f447-44af-8c07-955d9d6a842f", "address": "fa:16:3e:46:fb:cb", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38e304a6-f4", "ovs_interfaceid": "38e304a6-f447-44af-8c07-955d9d6a842f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 867.381804] env[62914]: DEBUG oslo_concurrency.lockutils [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] Acquired lock "refresh_cache-12e8b0ac-0dec-4928-ae65-ab53992ecab5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.381845] env[62914]: DEBUG nova.network.neutron [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Refreshing network info cache for port 38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 867.382997] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:fb:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38e304a6-f447-44af-8c07-955d9d6a842f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.391768] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating folder: 
Project (f4d16d60c1fb4848a299817c62263bf1). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 867.392752] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff100b57-b112-434a-92e4-823642ed8afb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.406906] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Created folder: Project (f4d16d60c1fb4848a299817c62263bf1) in parent group-v941773. [ 867.406906] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating folder: Instances. Parent ref: group-v941959. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 867.406906] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7813f990-697f-4794-90fd-055f9311b303 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.410953] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 867.411260] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 867.411449] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.411624] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 867.411824] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Image pref 0:0:0 {{(pid=62914) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.411925] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 867.412152] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 867.412320] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 867.412498] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 867.412673] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 867.412856] env[62914]: DEBUG nova.virt.hardware [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 867.413945] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d93fe9f-6db7-4708-8217-0bf52a886af3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.416757] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.425904] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca788e05-78f2-4774-8f1b-34c50cf7cc87 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.432715] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Created folder: Instances in parent group-v941959. 
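[editor's note] The recurring `Acquiring lock ... / Acquired lock ... / Lock ... "released" ... held N.NNNs` lines throughout this trace (the `refresh_cache-*`, `compute_resources`, and devstack-image-cache locks above) come from oslo.concurrency's lockutils wrappers. A rough sketch of the two forms such code typically uses is below; the lock names, function bodies, and UUID are illustrative, not Nova's actual implementation.

```python
from oslo_concurrency import lockutils

# Decorator form: serializes all callers on one named semaphore. The
# '... acquired by ... waited' / '"released" by ... held' debug lines in the
# trace are logged by this decorator's inner() wrapper.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # critical section -- e.g. update a resource tracker's view
    return instance_uuid

# Context-manager form: the plain 'Acquiring lock' / 'Releasing lock' lines
# (lockutils.lock itself) around the network-info cache refreshes.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # rebuild and store the instance's network info cache here
        pass

claim_resources('12e8b0ac-0dec-4928-ae65-ab53992ecab5')
refresh_cache('12e8b0ac-0dec-4928-ae65-ab53992ecab5')
```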
[ 867.433015] env[62914]: DEBUG oslo.service.loopingcall [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.433642] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 867.434191] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16ada91e-0a18-4ac0-8e0e-f3f49255b704 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.466131] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.466131] env[62914]: value = "task-4831996" [ 867.466131] env[62914]: _type = "Task" [ 867.466131] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.476586] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831996, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.494449] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831991, 'name': Rename_Task, 'duration_secs': 0.152032} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.494952] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 867.495074] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b1d0339-2b20-47e7-b5dc-25f8489175bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.503019] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 867.503019] env[62914]: value = "task-4831997" [ 867.503019] env[62914]: _type = "Task" [ 867.503019] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.512146] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831997, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.560348] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.596484] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.261s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.599147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.289s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.600578] env[62914]: DEBUG nova.objects.instance [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lazy-loading 'resources' on Instance uuid baf28ebf-3ab8-465c-a13b-705ccf3510dc {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.626750] env[62914]: INFO nova.scheduler.client.report [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Deleted allocations for instance 1ec89a28-d4f3-4324-bf14-c99c5ce05950 [ 867.680166] env[62914]: DEBUG nova.network.neutron [-] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.686972] env[62914]: DEBUG oslo_vmware.api [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4831993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198826} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.687573] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.687786] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 867.687977] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 867.689911] env[62914]: INFO nova.compute.manager [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Took 2.15 seconds to destroy the instance on the hypervisor. [ 867.691612] env[62914]: DEBUG oslo.service.loopingcall [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.693787] env[62914]: DEBUG nova.compute.manager [-] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 867.693787] env[62914]: DEBUG nova.network.neutron [-] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 867.716963] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ce9299-3290-b997-9142-d5d8f8c8c3af, 'name': SearchDatastore_Task, 'duration_secs': 0.011033} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.717329] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.717606] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] c488ba7b-68cc-4876-934f-a11d33fca6ab/c488ba7b-68cc-4876-934f-a11d33fca6ab.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 867.717877] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0100058d-954d-43ca-80a8-ce07d4c7f877 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.726540] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 867.726540] env[62914]: value = "task-4831998" [ 867.726540] env[62914]: _type = "Task" [ 867.726540] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.736336] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.981461] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4831996, 'name': CreateVM_Task, 'duration_secs': 0.338078} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.981873] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 867.982951] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.983468] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.983954] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.984449] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67406025-b1c6-4fdb-8ef7-c2229110161a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.992658] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 867.992658] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529c2454-ccd5-93f2-6cf9-27e981686d94" [ 867.992658] env[62914]: _type = "Task" [ 867.992658] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.006146] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529c2454-ccd5-93f2-6cf9-27e981686d94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.021119] env[62914]: DEBUG oslo_vmware.api [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4831997, 'name': PowerOnVM_Task, 'duration_secs': 0.517929} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.021522] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 868.021772] env[62914]: INFO nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Took 9.00 seconds to spawn the instance on the hypervisor. [ 868.022022] env[62914]: DEBUG nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 868.022953] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc020b1-53a6-4277-906f-f4e2850c1f02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.065941] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.138801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e15ce992-ade1-4939-abed-24cf54142093 tempest-ServerPasswordTestJSON-2059654700 tempest-ServerPasswordTestJSON-2059654700-project-member] Lock "1ec89a28-d4f3-4324-bf14-c99c5ce05950" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.970s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.197027] env[62914]: INFO nova.compute.manager [-] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Took 1.34 seconds to deallocate network for instance. [ 868.241125] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831998, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.273559] env[62914]: DEBUG nova.network.neutron [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Updated VIF entry in instance network info cache for port 38e304a6-f447-44af-8c07-955d9d6a842f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 868.273559] env[62914]: DEBUG nova.network.neutron [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Updating instance_info_cache with network_info: [{"id": "38e304a6-f447-44af-8c07-955d9d6a842f", "address": "fa:16:3e:46:fb:cb", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38e304a6-f4", "ovs_interfaceid": "38e304a6-f447-44af-8c07-955d9d6a842f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.507128] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529c2454-ccd5-93f2-6cf9-27e981686d94, 'name': SearchDatastore_Task, 'duration_secs': 0.06927} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.507506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.507759] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.508014] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.508417] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.508618] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.508906] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-409a0cfd-b86c-48f7-b858-c3a7e317f338 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.522374] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.523100] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 868.523651] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-027943ec-f285-4194-909a-a44c172a6a9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.531353] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 868.531353] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c604d-53fe-0895-6f7d-19e4ecf6cb7c" [ 868.531353] env[62914]: _type = "Task" [ 868.531353] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.545238] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c604d-53fe-0895-6f7d-19e4ecf6cb7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.550521] env[62914]: INFO nova.compute.manager [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Took 56.47 seconds to build instance. [ 868.562516] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.642076] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1ebced-2803-4e30-9310-0e77392d1975 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.650939] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac658f26-c59e-4790-aac9-c6671044e454 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.683497] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40e67b3-299b-470a-8e42-4099e85715e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.694363] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a3fa77-a40d-4587-b3d4-68f64e60df30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.710998] env[62914]: DEBUG nova.network.neutron [-] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.715786] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.715786] env[62914]: DEBUG nova.compute.provider_tree [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.739666] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559486} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.739970] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] c488ba7b-68cc-4876-934f-a11d33fca6ab/c488ba7b-68cc-4876-934f-a11d33fca6ab.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 868.740225] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.740528] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f1dda48-29f9-4453-8d71-b5e9d9c6cfd5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.748327] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 868.748327] env[62914]: value = "task-4831999" [ 868.748327] env[62914]: _type = "Task" [ 868.748327] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.757234] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831999, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.775700] env[62914]: DEBUG oslo_concurrency.lockutils [req-b52d2b79-64f0-400b-83a8-3dd0bfd50de1 req-cf9eb553-33c9-497e-8491-68b03a7a01d2 service nova] Releasing lock "refresh_cache-12e8b0ac-0dec-4928-ae65-ab53992ecab5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.815690] env[62914]: DEBUG nova.compute.manager [req-d3c9b9db-1bee-48dd-9b4a-2468787a1b06 req-96e5aec9-f15b-4a59-a559-2a898d46982a service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Received event network-vif-plugged-cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 868.816114] env[62914]: DEBUG oslo_concurrency.lockutils [req-d3c9b9db-1bee-48dd-9b4a-2468787a1b06 req-96e5aec9-f15b-4a59-a559-2a898d46982a service nova] Acquiring lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.816206] env[62914]: DEBUG oslo_concurrency.lockutils [req-d3c9b9db-1bee-48dd-9b4a-2468787a1b06 req-96e5aec9-f15b-4a59-a559-2a898d46982a service nova] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.816431] env[62914]: DEBUG oslo_concurrency.lockutils [req-d3c9b9db-1bee-48dd-9b4a-2468787a1b06 req-96e5aec9-f15b-4a59-a559-2a898d46982a service nova] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.817528] env[62914]: DEBUG nova.compute.manager [req-d3c9b9db-1bee-48dd-9b4a-2468787a1b06 req-96e5aec9-f15b-4a59-a559-2a898d46982a service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] No waiting events found dispatching network-vif-plugged-cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 868.817528] env[62914]: WARNING nova.compute.manager [req-d3c9b9db-1bee-48dd-9b4a-2468787a1b06 req-96e5aec9-f15b-4a59-a559-2a898d46982a service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Received unexpected event network-vif-plugged-cda3d580-68ab-4c88-9773-f8fb5b8394f2 for instance with vm_state building and task_state spawning. [ 869.043443] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c604d-53fe-0895-6f7d-19e4ecf6cb7c, 'name': SearchDatastore_Task, 'duration_secs': 0.012154} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.044283] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-982170a2-6468-495f-b124-ef4fe922468f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.052646] env[62914]: DEBUG oslo_concurrency.lockutils [None req-95ed1a6f-5ea9-45df-91c9-5ce43238be56 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "982936be-3cb1-4930-b135-8fc2019c5216" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.520s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.053010] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 869.053010] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52621469-3944-1507-d8ed-7ed7f1a6baea" [ 869.053010] env[62914]: _type = "Task" [ 869.053010] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.060785] env[62914]: DEBUG nova.compute.manager [req-3a22e5dd-bf32-427c-869c-0a48e370a4f6 req-06983659-08ad-47f6-92f9-d02200f1e397 service nova] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Received event network-vif-deleted-a0474c06-0c2f-4fe9-8636-c16aa5b3bca8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 869.061041] env[62914]: DEBUG nova.compute.manager [req-3a22e5dd-bf32-427c-869c-0a48e370a4f6 req-06983659-08ad-47f6-92f9-d02200f1e397 service nova] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Received event network-vif-deleted-5318eee4-4de3-4b6a-acef-6991fa42dabc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 869.072716] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4831985, 'name': CloneVM_Task, 'duration_secs': 3.347068} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.077522] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Created linked-clone VM from snapshot [ 869.077874] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52621469-3944-1507-d8ed-7ed7f1a6baea, 'name': SearchDatastore_Task, 'duration_secs': 0.016406} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.079320] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46b2fac-60a0-4b58-9795-f7e314e4eb91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.082060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.085022] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 12e8b0ac-0dec-4928-ae65-ab53992ecab5/12e8b0ac-0dec-4928-ae65-ab53992ecab5.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 869.085022] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36fdf646-d365-42d7-a176-d19358e89de1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.086882] env[62914]: DEBUG nova.network.neutron [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Successfully updated port: cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.091785] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Uploading image 2ebc6dd3-004e-4687-828d-5c558c734f0f {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 869.097835] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 869.097835] env[62914]: value = "task-4832000" [ 869.097835] env[62914]: _type = "Task" [ 869.097835] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.110026] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832000, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.128965] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 869.128965] env[62914]: value = "vm-941958" [ 869.128965] env[62914]: _type = "VirtualMachine" [ 869.128965] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 869.130532] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-082cfa6b-2ba1-44c5-a050-577a3807b766 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.141022] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease: (returnval){ [ 869.141022] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bc580-b830-bdbb-d253-c66885b2746d" [ 869.141022] env[62914]: _type = "HttpNfcLease" [ 869.141022] env[62914]: } obtained for exporting VM: (result){ [ 869.141022] env[62914]: value = "vm-941958" [ 869.141022] env[62914]: _type = "VirtualMachine" [ 869.141022] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 869.141022] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the lease: (returnval){ [ 869.141022] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bc580-b830-bdbb-d253-c66885b2746d" [ 869.141022] env[62914]: _type = "HttpNfcLease" [ 869.141022] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 869.146565] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.146565] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bc580-b830-bdbb-d253-c66885b2746d" [ 869.146565] env[62914]: _type = "HttpNfcLease" [ 869.146565] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 869.215753] env[62914]: INFO nova.compute.manager [-] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Took 1.52 seconds to deallocate network for instance. 
[ 869.216661] env[62914]: DEBUG nova.scheduler.client.report [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.258887] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4831999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076227} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.259690] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.260028] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db019a0d-7ec4-4366-87d3-c330604cc9d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.286188] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] c488ba7b-68cc-4876-934f-a11d33fca6ab/c488ba7b-68cc-4876-934f-a11d33fca6ab.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.286879] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a75f0cab-8dd4-4c2b-8f17-3cd4ddf9be33 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.308211] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 869.308211] env[62914]: value = "task-4832002" [ 869.308211] env[62914]: _type = "Task" [ 869.308211] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.318345] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832002, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.446641] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "982936be-3cb1-4930-b135-8fc2019c5216" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.446809] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "982936be-3cb1-4930-b135-8fc2019c5216" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.446973] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "982936be-3cb1-4930-b135-8fc2019c5216-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.447188] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "982936be-3cb1-4930-b135-8fc2019c5216-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.447442] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "982936be-3cb1-4930-b135-8fc2019c5216-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.450069] env[62914]: INFO nova.compute.manager [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Terminating instance [ 869.453476] env[62914]: DEBUG nova.compute.manager [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 869.453641] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 869.454574] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dbc95c-75e4-484b-b1a6-3891a3fe5553 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.459955] env[62914]: DEBUG nova.compute.manager [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Received event network-changed-cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 869.460174] env[62914]: DEBUG nova.compute.manager [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Refreshing instance network info cache due to event network-changed-cda3d580-68ab-4c88-9773-f8fb5b8394f2. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 869.460460] env[62914]: DEBUG oslo_concurrency.lockutils [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] Acquiring lock "refresh_cache-ea06d3c3-d836-4e66-ac66-42f9886cd5de" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.460580] env[62914]: DEBUG oslo_concurrency.lockutils [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] Acquired lock "refresh_cache-ea06d3c3-d836-4e66-ac66-42f9886cd5de" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.460747] env[62914]: DEBUG nova.network.neutron [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Refreshing network info cache for port cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 869.467414] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 869.467714] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08db74e6-974d-4f94-a232-991bad5081a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.477576] env[62914]: DEBUG oslo_vmware.api [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 869.477576] env[62914]: value = "task-4832003" [ 869.477576] env[62914]: _type = "Task" [ 869.477576] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.492195] env[62914]: DEBUG oslo_vmware.api [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832003, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.565860] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 869.598151] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "refresh_cache-ea06d3c3-d836-4e66-ac66-42f9886cd5de" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.612771] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832000, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.650035] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.650035] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bc580-b830-bdbb-d253-c66885b2746d" [ 869.650035] env[62914]: _type = "HttpNfcLease" [ 869.650035] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 869.650465] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 869.650465] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520bc580-b830-bdbb-d253-c66885b2746d" [ 869.650465] env[62914]: _type = "HttpNfcLease" [ 869.650465] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 869.651350] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40085193-b55c-49bc-b658-53decaf93da9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.663830] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5239333a-c1ac-f9eb-32ff-e33c9d8f69a4/disk-0.vmdk from lease info. 
{{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 869.664061] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5239333a-c1ac-f9eb-32ff-e33c9d8f69a4/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 869.724452] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.728154] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.247s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.729797] env[62914]: INFO nova.compute.claims [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.734643] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.753733] env[62914]: INFO nova.scheduler.client.report [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleted allocations for instance baf28ebf-3ab8-465c-a13b-705ccf3510dc [ 869.793334] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1deb1a89-2152-4b65-aac0-7c228698c252 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.828596] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832002, 'name': ReconfigVM_Task, 'duration_secs': 0.3831} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.833851] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Reconfigured VM instance instance-00000042 to attach disk [datastore2] c488ba7b-68cc-4876-934f-a11d33fca6ab/c488ba7b-68cc-4876-934f-a11d33fca6ab.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.835016] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffc2f852-6eff-43cf-a06f-e746421e9a6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.847253] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 869.847253] env[62914]: value = "task-4832004" [ 869.847253] env[62914]: _type = "Task" [ 869.847253] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.861113] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832004, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.989483] env[62914]: DEBUG oslo_vmware.api [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832003, 'name': PowerOffVM_Task, 'duration_secs': 0.236514} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.990187] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 869.990453] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 869.990780] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-006440b1-1ebe-4410-b583-e3e906ca8b96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.029561] env[62914]: DEBUG nova.network.neutron [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.053241] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 870.053488] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 870.053698] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore2] 982936be-3cb1-4930-b135-8fc2019c5216 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.053968] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aace7f11-fe21-45f2-98b7-f8d4113e5a3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.062219] env[62914]: DEBUG oslo_vmware.api [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 870.062219] env[62914]: value = "task-4832006" [ 870.062219] env[62914]: _type = "Task" [ 870.062219] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.074980] env[62914]: DEBUG oslo_vmware.api [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832006, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.094415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.108325] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701636} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.108598] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 12e8b0ac-0dec-4928-ae65-ab53992ecab5/12e8b0ac-0dec-4928-ae65-ab53992ecab5.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 870.108818] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.109110] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75493633-d7ac-4e2b-8412-5e985ab4be84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.118157] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 870.118157] env[62914]: value = "task-4832007" [ 870.118157] env[62914]: _type = "Task" [ 870.118157] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.128338] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832007, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.139637] env[62914]: DEBUG nova.network.neutron [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.262845] env[62914]: DEBUG oslo_concurrency.lockutils [None req-74936c65-bc88-4b1c-8287-5527153ac62c tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "baf28ebf-3ab8-465c-a13b-705ccf3510dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.976s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.362080] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832004, 'name': Rename_Task, 'duration_secs': 0.15537} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.364019] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 870.364019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77814464-78ff-4b51-8583-6927922532ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.374679] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 870.374679] env[62914]: value = "task-4832008" [ 870.374679] env[62914]: _type = "Task" [ 870.374679] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.386474] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832008, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.577676] env[62914]: DEBUG oslo_vmware.api [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155697} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.577676] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.577676] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 870.577676] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 870.577676] env[62914]: INFO nova.compute.manager [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Took 1.12 seconds to destroy the instance on the hypervisor. [ 870.577676] env[62914]: DEBUG oslo.service.loopingcall [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.577676] env[62914]: DEBUG nova.compute.manager [-] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 870.577676] env[62914]: DEBUG nova.network.neutron [-] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 870.638376] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "b477cd62-49c2-4e3c-98ea-b4154dda4986" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.638947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.639495] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "b477cd62-49c2-4e3c-98ea-b4154dda4986-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.639897] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.640295] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.642729] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071576} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.643415] env[62914]: DEBUG oslo_concurrency.lockutils [req-d0b4cd85-6088-41d6-9014-558d79c98b7d req-a018e67b-b365-4ffa-b91c-c226527eb0f4 service nova] Releasing lock "refresh_cache-ea06d3c3-d836-4e66-ac66-42f9886cd5de" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.644071] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.646023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired lock "refresh_cache-ea06d3c3-d836-4e66-ac66-42f9886cd5de" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.646023] env[62914]: DEBUG nova.network.neutron [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 870.650229] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71bdbb1-4314-4227-a88b-cdfbc1b97961 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.651460] env[62914]: INFO nova.compute.manager [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Terminating instance [ 870.655602] env[62914]: DEBUG nova.compute.manager [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 870.656123] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 870.657276] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a50211-eb11-442f-b567-e006ec876497 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.684624] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 12e8b0ac-0dec-4928-ae65-ab53992ecab5/12e8b0ac-0dec-4928-ae65-ab53992ecab5.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.690136] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f774ac6-5ff9-4f20-a6a3-424d074b6046 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.712570] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 870.716573] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e0669e7-22af-49f0-8cbc-9c92cf38476e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.721566] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 870.721566] env[62914]: value = "task-4832009" [ 870.721566] env[62914]: _type = "Task" [ 870.721566] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.723617] env[62914]: DEBUG oslo_vmware.api [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 870.723617] env[62914]: value = "task-4832010" [ 870.723617] env[62914]: _type = "Task" [ 870.723617] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.748773] env[62914]: DEBUG oslo_vmware.api [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832010, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.756265] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832009, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.894431] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832008, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.245361] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832009, 'name': ReconfigVM_Task, 'duration_secs': 0.424457} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.251818] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 12e8b0ac-0dec-4928-ae65-ab53992ecab5/12e8b0ac-0dec-4928-ae65-ab53992ecab5.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.253159] env[62914]: DEBUG oslo_vmware.api [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832010, 'name': PowerOffVM_Task, 'duration_secs': 0.254725} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.254738] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e26a8095-3ae2-48c7-a8b4-7ed207b988ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.256768] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 871.260073] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 871.260073] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-616dd71f-ba32-44ae-80e5-0e4abb26e2d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.261252] env[62914]: DEBUG nova.network.neutron [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.266056] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 871.266056] env[62914]: value = "task-4832011" [ 871.266056] env[62914]: _type = "Task" [ 871.266056] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.279203] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832011, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.337861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 871.337861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 871.338239] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleting the datastore file [datastore2] b477cd62-49c2-4e3c-98ea-b4154dda4986 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 871.338377] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8259cce2-19ab-4c83-9220-e44c19af0058 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.353985] env[62914]: DEBUG oslo_vmware.api [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 871.353985] env[62914]: value = "task-4832013" [ 871.353985] env[62914]: _type = "Task" [ 871.353985] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.365876] env[62914]: DEBUG oslo_vmware.api [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.388052] env[62914]: DEBUG oslo_vmware.api [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832008, 'name': PowerOnVM_Task, 'duration_secs': 0.540287} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.388471] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 871.388743] env[62914]: INFO nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Took 9.62 seconds to spawn the instance on the hypervisor. 
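The ReconfigVM_Task, PowerOffVM_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vSphere method that returns a Task managed object, then blocks while the session polls it (the "progress is N%" and "completed successfully" lines from _poll_task). A minimal sketch of that pattern, with placeholder host, credentials and vm_ref rather than anything taken from this log:

from oslo_vmware import api


def power_off_and_wait(session, vm_ref):
    # invoke_api() issues the SOAP call through the suds client; the returned
    # value is a Task moref that wait_for_task() polls (the "progress is N%"
    # lines) until it reaches a terminal state, raising on error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)


# Hypothetical connection values, shown only to indicate where the session
# comes from; task_poll_interval is the cadence of the _poll_task loop.
# session = api.VMwareAPISession('vcenter.example.test', 'admin', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)

The same wait applies to every task in this trace, from ReconfigVM_Task down to DeleteDatastoreFile_Task.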
[ 871.389041] env[62914]: DEBUG nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 871.390056] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f5df75-e657-45df-95ee-f0ed2ac36b9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.485849] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3258212-41d8-4826-8149-960d53b507ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.494275] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92306c8-8d2c-4311-8381-9934a2eb5177 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.532224] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ff8bb0-7053-4da0-8695-b10be23fbf70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.542935] env[62914]: DEBUG nova.network.neutron [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Updating instance_info_cache with network_info: [{"id": "cda3d580-68ab-4c88-9773-f8fb5b8394f2", "address": "fa:16:3e:ce:04:2c", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcda3d580-68", "ovs_interfaceid": "cda3d580-68ab-4c88-9773-f8fb5b8394f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.545113] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464a84cb-d65b-447b-8341-948992adeaf6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.562777] env[62914]: DEBUG nova.compute.provider_tree [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.703625] env[62914]: DEBUG nova.network.neutron [-] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.782371] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832011, 'name': Rename_Task, 'duration_secs': 0.183733} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.782371] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 871.782371] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39bcebb3-47fe-4229-abb5-b37cd41c52ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.789089] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 871.789089] env[62914]: value = "task-4832014" [ 871.789089] env[62914]: _type = "Task" [ 871.789089] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.799523] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.854660] env[62914]: DEBUG nova.compute.manager [req-b9910e5a-6c03-4ba5-b457-6e948885287f req-b3668ec3-075c-4a2d-8d6d-5bbb2bab6c2e service nova] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Received event network-vif-deleted-a6bab462-94fa-4095-9dbf-83474e5057dd {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 871.870078] env[62914]: DEBUG oslo_vmware.api [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188749} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.870655] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.870906] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 871.871224] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 871.871647] env[62914]: INFO nova.compute.manager [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Took 1.22 seconds to destroy the instance on the hypervisor. [ 871.871868] env[62914]: DEBUG oslo.service.loopingcall [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.872188] env[62914]: DEBUG nova.compute.manager [-] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 871.872346] env[62914]: DEBUG nova.network.neutron [-] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 871.923046] env[62914]: INFO nova.compute.manager [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Took 55.12 seconds to build instance.
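The oslo.service.loopingcall line above ("Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.") is the RetryDecorator wrapper Nova puts around network deallocation so transient failures are retried before the instance is reported gone. A sketch of that wrapper under assumed retry settings and a stand-in exception type, not Nova's exact configuration:

from oslo_service import loopingcall


class TransientNetworkError(Exception):
    """Stand-in for the retriable errors a real caller would raise."""


# RetryDecorator re-runs the wrapped callable whenever one of the listed
# exceptions escapes, sleeping a little longer between attempts; the caller
# blocks on the result, which is what logs "Waiting for function ... to return."
@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=12,
                            exceptions=(TransientNetworkError,))
def deallocate_network(instance_uuid):
    # A real implementation would call the Neutron API here and let
    # TransientNetworkError propagate on temporary faults.
    return instance_uuid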
[ 872.045276] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Releasing lock "refresh_cache-ea06d3c3-d836-4e66-ac66-42f9886cd5de" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.045702] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Instance network_info: |[{"id": "cda3d580-68ab-4c88-9773-f8fb5b8394f2", "address": "fa:16:3e:ce:04:2c", "network": {"id": "5f53af1a-9021-4ca5-990d-b8dbff8b6dc9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eed5cec4d5bf40c1ae2fdf52f374b697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcda3d580-68", "ovs_interfaceid": "cda3d580-68ab-4c88-9773-f8fb5b8394f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 872.046252] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:04:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cda3d580-68ab-4c88-9773-f8fb5b8394f2', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.057269] env[62914]: DEBUG oslo.service.loopingcall [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.057658] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 872.058026] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7a47a5c-b473-4379-a7c7-69a13c16c016 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.075880] env[62914]: DEBUG nova.scheduler.client.report [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.087042] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.087042] env[62914]: value = "task-4832015" [ 872.087042] env[62914]: _type = "Task" [ 872.087042] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.099933] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832015, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.207761] env[62914]: INFO nova.compute.manager [-] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Took 1.63 seconds to deallocate network for instance. [ 872.302465] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832014, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.429012] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b3276513-5278-45fe-94a4-88d9db8b717c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 89.366s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.551329] env[62914]: DEBUG nova.compute.manager [req-8a42d679-4ebe-4044-b6f1-c3fc6dd680cc req-5754ab8b-2af2-4892-878e-0045134045bc service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Received event network-vif-deleted-08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 872.551532] env[62914]: INFO nova.compute.manager [req-8a42d679-4ebe-4044-b6f1-c3fc6dd680cc req-5754ab8b-2af2-4892-878e-0045134045bc service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Neutron deleted interface 08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1; detaching it from the instance and deleting it from the info cache [ 872.551703] env[62914]: DEBUG nova.network.neutron [req-8a42d679-4ebe-4044-b6f1-c3fc6dd680cc req-5754ab8b-2af2-4892-878e-0045134045bc service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.581706] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.853s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.582363] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 872.586132] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.702s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.587873] env[62914]: INFO nova.compute.claims [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.601822] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832015, 'name': CreateVM_Task, 'duration_secs': 0.3727} completed successfully.
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.602874] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 872.603851] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.604108] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.604528] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.604833] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a68906b-956b-4f2a-ba1c-d2bb46989bfc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.614586] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 872.614586] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c41d0b-7d72-1753-b4a7-325e2de45c64" [ 872.614586] env[62914]: _type = "Task" [ 872.614586] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.628776] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c41d0b-7d72-1753-b4a7-325e2de45c64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.698458] env[62914]: DEBUG nova.network.neutron [-] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.715954] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.803298] env[62914]: DEBUG oslo_vmware.api [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832014, 'name': PowerOnVM_Task, 'duration_secs': 0.931264} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.803645] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 872.803923] env[62914]: INFO nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Took 8.23 seconds to spawn the instance on the hypervisor. [ 872.804146] env[62914]: DEBUG nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 872.805090] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc64868-8fd4-4679-bd40-742443c843dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.933143] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 873.064323] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a231e84-4613-447a-9cb5-ff604edfda8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.076330] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e746c6-a6ef-473f-a3d8-85f5774df2f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.089734] env[62914]: DEBUG nova.compute.utils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 873.090113] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 873.090289] env[62914]: DEBUG nova.network.neutron [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 873.121149] env[62914]: DEBUG nova.compute.manager [req-8a42d679-4ebe-4044-b6f1-c3fc6dd680cc req-5754ab8b-2af2-4892-878e-0045134045bc service nova] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Detach interface failed, port_id=08e0b9bd-1c95-4c6f-919b-4c05e8ef81d1, reason: Instance b477cd62-49c2-4e3c-98ea-b4154dda4986 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 873.132470] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c41d0b-7d72-1753-b4a7-325e2de45c64, 'name': SearchDatastore_Task, 'duration_secs': 0.01243} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.132838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.133113] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.133368] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.133524] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.133738] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.134405] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6fbea61-08fb-4db4-a587-326121427386 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.141536] env[62914]: DEBUG nova.policy [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8051a41d4b7f40ac885c45f9322cef8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '748d871d6c2540a98bccd2492302f699', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 873.145560] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.145778] env[62914]: DEBUG 
nova.virt.vmwareapi.vmops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 873.146635] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd0b1037-69c3-4a1a-bc34-67d255b97668 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.153431] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 873.153431] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a782c-60b5-54d7-06a0-674f94f91954" [ 873.153431] env[62914]: _type = "Task" [ 873.153431] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.163493] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a782c-60b5-54d7-06a0-674f94f91954, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.205601] env[62914]: INFO nova.compute.manager [-] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Took 1.33 seconds to deallocate network for instance. [ 873.327711] env[62914]: INFO nova.compute.manager [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Took 47.47 seconds to build instance. 
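The Acquiring/Acquired/Releasing lock lines in this stretch are oslo.concurrency serializing access to the devstack image cache: one lock per cached VMDK keeps concurrent spawns from fetching or copying the same image twice. A minimal sketch of the pattern, with the lock name format copied from the log and a placeholder body:

from oslo_concurrency import lockutils


def process_cached_image(image_id, datastore='datastore2'):
    # lockutils.lock() is a context manager; entering and leaving it produces
    # the Acquiring/Acquired/Releasing messages seen above.
    lock_name = ('[%s] devstack-image-cache_base/%s/%s.vmdk'
                 % (datastore, image_id, image_id))
    with lockutils.lock(lock_name):
        # fetch-if-missing and CopyVirtualDisk work would go here
        pass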
[ 873.429927] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.430114] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.430959] env[62914]: INFO nova.compute.manager [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Rebooting instance [ 873.456936] env[62914]: DEBUG nova.network.neutron [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Successfully created port: 1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.465556] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.594820] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 873.669249] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a782c-60b5-54d7-06a0-674f94f91954, 'name': SearchDatastore_Task, 'duration_secs': 0.019172} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.671018] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a7ad029-3d91-47bf-82b7-6b58240ac63e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.682510] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 873.682510] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff5935-e6d2-5e6f-7b86-68463b934735" [ 873.682510] env[62914]: _type = "Task" [ 873.682510] env[62914]: } to complete.
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.694026] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff5935-e6d2-5e6f-7b86-68463b934735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.714684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.829960] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae286c0b-4b2d-41bb-8a29-db1ec7cf8e61 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 90.764s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.916578] env[62914]: DEBUG nova.compute.manager [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Received event network-changed-aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 873.917111] env[62914]: DEBUG nova.compute.manager [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Refreshing instance network info cache due to event network-changed-aac4511b-c3e4-44ce-8c77-1400d0526d22.
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 873.917826] env[62914]: DEBUG oslo_concurrency.lockutils [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] Acquiring lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.918122] env[62914]: DEBUG oslo_concurrency.lockutils [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] Acquired lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.918502] env[62914]: DEBUG nova.network.neutron [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Refreshing network info cache for port aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 873.963688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.111944] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cc6732-171f-412f-9664-5fb2e08910e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.120575] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0241ed7-c035-40d1-898e-1e22f708a69e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.153261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762da760-b07c-4a72-92be-aac5fe87a317 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.162578] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03bdfa7-da9a-44cc-840b-48c4778ca87b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.179042] env[62914]: DEBUG nova.compute.provider_tree [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.193379] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ff5935-e6d2-5e6f-7b86-68463b934735, 'name': SearchDatastore_Task, 'duration_secs': 0.014716} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.193662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.193925] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ea06d3c3-d836-4e66-ac66-42f9886cd5de/ea06d3c3-d836-4e66-ac66-42f9886cd5de.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 874.194199] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35a9f10c-2eac-4dff-96a0-47c64a94be56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.201565] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 874.201565] env[62914]: value = "task-4832016" [ 874.201565] env[62914]: _type = "Task" [ 874.201565] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.210450] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.333074] env[62914]: DEBUG nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 874.607910] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 874.647565] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 874.647989] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 874.648294] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.648666] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 874.648943] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.649259] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 874.649622] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 874.650657] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 874.650657] env[62914]: DEBUG nova.virt.hardware [None 
req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 874.650657] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 874.650868] env[62914]: DEBUG nova.virt.hardware [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.652753] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9078012-c90e-40f5-ac0d-27a126b08782 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.664233] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da180ab9-c8ae-4cec-b2e8-5728536bf746 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.692906] env[62914]: DEBUG nova.scheduler.client.report [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 874.717745] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832016, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.783553] env[62914]: DEBUG nova.network.neutron [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updated VIF entry in instance network info cache for port aac4511b-c3e4-44ce-8c77-1400d0526d22. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 874.784040] env[62914]: DEBUG nova.network.neutron [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [{"id": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "address": "fa:16:3e:ad:a6:42", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac4511b-c3", "ovs_interfaceid": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.861028] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.001452] env[62914]: DEBUG nova.compute.manager [req-3f89a5cc-cff1-4516-b78c-719355a555bd req-5ef25569-bebd-448a-9181-3782ffdc3e3c service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Received event network-vif-plugged-1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 875.001710] env[62914]: DEBUG oslo_concurrency.lockutils [req-3f89a5cc-cff1-4516-b78c-719355a555bd req-5ef25569-bebd-448a-9181-3782ffdc3e3c service nova] Acquiring lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.001935] env[62914]: DEBUG oslo_concurrency.lockutils [req-3f89a5cc-cff1-4516-b78c-719355a555bd req-5ef25569-bebd-448a-9181-3782ffdc3e3c service nova] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.002995] env[62914]: DEBUG oslo_concurrency.lockutils [req-3f89a5cc-cff1-4516-b78c-719355a555bd req-5ef25569-bebd-448a-9181-3782ffdc3e3c service nova] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.003299] env[62914]: DEBUG nova.compute.manager [req-3f89a5cc-cff1-4516-b78c-719355a555bd req-5ef25569-bebd-448a-9181-3782ffdc3e3c service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] No waiting events found dispatching network-vif-plugged-1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 875.003507] env[62914]: WARNING nova.compute.manager [req-3f89a5cc-cff1-4516-b78c-719355a555bd req-5ef25569-bebd-448a-9181-3782ffdc3e3c service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Received unexpected event network-vif-plugged-1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 for instance with vm_state building and task_state spawning. [ 875.091328] env[62914]: DEBUG nova.network.neutron [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Successfully updated port: 1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.200623] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.201255] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 875.204297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.034s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.204552] env[62914]: DEBUG nova.objects.instance [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'resources' on Instance uuid 2d48056c-d38f-4be1-b28b-71da14607870 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.215966] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.754161} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.216270] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ea06d3c3-d836-4e66-ac66-42f9886cd5de/ea06d3c3-d836-4e66-ac66-42f9886cd5de.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 875.216491] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.216752] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-565b359d-aaff-4f70-bc70-21f9e2c040da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.225344] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 875.225344] env[62914]: value = "task-4832017" [ 875.225344] env[62914]: _type = "Task" [ 875.225344] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.236353] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832017, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.287223] env[62914]: DEBUG oslo_concurrency.lockutils [req-c0517f07-453a-4539-b7f6-d74deeec599d req-62f67722-fdee-4205-9d71-518b58eef4e4 service nova] Releasing lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.287822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquired lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.287986] env[62914]: DEBUG nova.network.neutron [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 875.593429] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "refresh_cache-d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.593629] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquired lock "refresh_cache-d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.593793] env[62914]: DEBUG nova.network.neutron [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 875.711833] env[62914]: DEBUG nova.compute.utils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 875.712717] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 875.713046] env[62914]: DEBUG nova.network.neutron [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 875.737093] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832017, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07352} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.739893] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.740962] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e88abd-7ed0-4765-8549-baf08fab0498 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.768665] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] ea06d3c3-d836-4e66-ac66-42f9886cd5de/ea06d3c3-d836-4e66-ac66-42f9886cd5de.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.773096] env[62914]: DEBUG nova.policy [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b21fade54c4e45ec970a73d404ea69ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23c2dfbda62544b8bdba7832e31a0f27', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 875.774990] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab21ccf0-fdc7-4212-9c3d-05e2052dda89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.802328] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 875.802328] env[62914]: value = "task-4832018" [ 875.802328] env[62914]: _type = "Task" [ 875.802328] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.818136] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832018, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.091899] env[62914]: DEBUG nova.network.neutron [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [{"id": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "address": "fa:16:3e:ad:a6:42", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac4511b-c3", "ovs_interfaceid": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.138998] env[62914]: DEBUG nova.network.neutron [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 876.143598] env[62914]: DEBUG nova.network.neutron [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Successfully created port: 3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.216379] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 876.255687] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93513a6a-a8f6-4e76-9c5b-f402bf65deb1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.264075] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967f870a-1bdf-4d6f-a543-a66782582420 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.443216] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3d922f-1fc6-44b5-baf7-439e718589d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.458680] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.462469] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7770ad67-2bad-4fed-91ee-747daf8926a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.492298] env[62914]: DEBUG nova.compute.provider_tree [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.594975] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Releasing lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.597614] env[62914]: DEBUG nova.compute.manager [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 876.598643] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e053e4b-c57f-4c3c-b969-097d54e50354 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.656893] env[62914]: DEBUG nova.network.neutron [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Updating instance_info_cache with network_info: [{"id": "1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0", "address": "fa:16:3e:a7:54:b6", "network": {"id": "ab18a9ea-5282-48f2-aca7-56aa6a2418ae", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-623779984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "748d871d6c2540a98bccd2492302f699", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f51697b-7d", "ovs_interfaceid": "1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.943140] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832018, 'name': ReconfigVM_Task, 'duration_secs': 0.758345} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.943479] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Reconfigured VM instance instance-00000044 to attach disk [datastore2] ea06d3c3-d836-4e66-ac66-42f9886cd5de/ea06d3c3-d836-4e66-ac66-42f9886cd5de.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.944206] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18f53228-431c-4756-9271-362d7790c38b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.953038] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 876.953038] env[62914]: value = "task-4832019" [ 876.953038] env[62914]: _type = "Task" [ 876.953038] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.963111] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832019, 'name': Rename_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.994554] env[62914]: DEBUG nova.scheduler.client.report [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.160155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Releasing lock "refresh_cache-d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.160537] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Instance network_info: |[{"id": "1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0", "address": "fa:16:3e:a7:54:b6", "network": {"id": "ab18a9ea-5282-48f2-aca7-56aa6a2418ae", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-623779984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "748d871d6c2540a98bccd2492302f699", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f51697b-7d", "ovs_interfaceid": "1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 877.161500] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:54:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.169794] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e 
tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Creating folder: Project (748d871d6c2540a98bccd2492302f699). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 877.170597] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b44cd4de-3ac4-422f-841a-167617b8ecbd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.183398] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Created folder: Project (748d871d6c2540a98bccd2492302f699) in parent group-v941773. [ 877.183618] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Creating folder: Instances. Parent ref: group-v941963. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 877.183889] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0463703e-6494-42e7-a70c-19a8d106b3c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.197833] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Created folder: Instances in parent group-v941963. [ 877.198154] env[62914]: DEBUG oslo.service.loopingcall [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.198795] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 877.198795] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-359f6211-2e02-4e1c-b2a8-9d74baa5585a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.220062] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.220062] env[62914]: value = "task-4832022" [ 877.220062] env[62914]: _type = "Task" [ 877.220062] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.229707] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832022, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.232286] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 877.261361] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.261623] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.261780] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.261960] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.262764] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.262993] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.263269] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.263915] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.263915] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.263915] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.264080] env[62914]: DEBUG nova.virt.hardware [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.264922] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b00517-4a78-4d2c-be8a-3fb8fe4952ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.274072] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dffbd57-3f8d-456b-a987-7a5d478c1e04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.282875] env[62914]: DEBUG nova.compute.manager [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Received event network-changed-1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 877.282986] env[62914]: DEBUG nova.compute.manager [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Refreshing instance network info cache due to event network-changed-1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 877.283225] env[62914]: DEBUG oslo_concurrency.lockutils [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] Acquiring lock "refresh_cache-d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.283381] env[62914]: DEBUG oslo_concurrency.lockutils [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] Acquired lock "refresh_cache-d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.284144] env[62914]: DEBUG nova.network.neutron [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Refreshing network info cache for port 1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 877.464718] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832019, 'name': Rename_Task, 'duration_secs': 0.25438} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.467388] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 877.467626] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6154953-d41c-40ae-8772-6aedf35422db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.476336] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 877.476336] env[62914]: value = "task-4832023" [ 877.476336] env[62914]: _type = "Task" [ 877.476336] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.484219] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.501132] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.503967] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.236s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.506338] env[62914]: INFO nova.compute.claims [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.535556] env[62914]: INFO nova.scheduler.client.report [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted allocations for instance 2d48056c-d38f-4be1-b28b-71da14607870 [ 877.617038] env[62914]: DEBUG nova.network.neutron [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Updated VIF entry in instance network info cache for port 1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 877.617588] env[62914]: DEBUG nova.network.neutron [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Updating instance_info_cache with network_info: [{"id": "1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0", "address": "fa:16:3e:a7:54:b6", "network": {"id": "ab18a9ea-5282-48f2-aca7-56aa6a2418ae", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-623779984-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "748d871d6c2540a98bccd2492302f699", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f51697b-7d", "ovs_interfaceid": "1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.626503] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3522960-14db-4509-89dd-cba2f365a874 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.635323] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Doing hard reboot of VM {{(pid=62914) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 877.635641] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1da16f3a-149b-4f3b-af13-7ec9ca07f064 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.644422] env[62914]: DEBUG oslo_vmware.api [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 877.644422] env[62914]: value = "task-4832024" [ 877.644422] env[62914]: _type = "Task" [ 877.644422] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.657412] env[62914]: DEBUG oslo_vmware.api [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832024, 'name': ResetVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.733425] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832022, 'name': CreateVM_Task, 'duration_secs': 0.401632} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.734248] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 877.734967] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.735366] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.735939] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.739027] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16434f4c-ac11-4fbc-87ca-2fb7fbb865c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.746337] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 877.746337] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527cc9b4-d074-a988-052b-5877282a4e33" [ 877.746337] env[62914]: _type = "Task" [ 877.746337] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.755859] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527cc9b4-d074-a988-052b-5877282a4e33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.900404] env[62914]: DEBUG nova.network.neutron [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Successfully updated port: 3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.988393] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.044754] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f245d120-664c-4e0b-b6ca-3727911a4b13 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "2d48056c-d38f-4be1-b28b-71da14607870" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.689s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.045668] env[62914]: DEBUG oslo_concurrency.lockutils [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] Acquired lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.046766] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d48f83-1a00-4320-9880-7a9952a1a69f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.056407] env[62914]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 878.056407] env[62914]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62914) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 878.056659] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a01b8a9b-af2e-45ee-a8f3-d2d5f023451d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.067016] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e402f013-3f65-4326-b0e4-5bd0f1694172 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.105382] env[62914]: ERROR root [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-941921' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = 
get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-941921' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-941921' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-941921'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-941921' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-941921' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-941921'}\n"]: nova.exception.InstanceNotFound: Instance 2d48056c-d38f-4be1-b28b-71da14607870 could not be found. [ 878.105615] env[62914]: DEBUG oslo_concurrency.lockutils [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] Releasing lock "2d48056c-d38f-4be1-b28b-71da14607870" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.105842] env[62914]: DEBUG nova.compute.manager [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Detach interface failed, port_id=55d78b2e-b665-4a1c-84fe-47e02f937395, reason: Instance 2d48056c-d38f-4be1-b28b-71da14607870 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 878.106044] env[62914]: DEBUG nova.compute.manager [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Received event network-vif-deleted-0acbfa32-adde-4a6e-bfb6-c745a0b8524a {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 878.106233] env[62914]: INFO nova.compute.manager [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Neutron deleted interface 0acbfa32-adde-4a6e-bfb6-c745a0b8524a; detaching it from the instance and deleting it from the info cache [ 878.106497] env[62914]: DEBUG nova.network.neutron [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Updating instance_info_cache with network_info: [{"id": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "address": "fa:16:3e:be:c8:0d", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e36426-30", "ovs_interfaceid": "06e36426-302a-4bcd-bb7a-f9d6dd3a72c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.120631] env[62914]: DEBUG oslo_concurrency.lockutils [req-b4ae966e-422b-4cad-aef8-87262dabb8b5 req-f3be79eb-36e6-4c8a-b10a-bc4c3a7e3416 service nova] Releasing lock "refresh_cache-d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.156668] env[62914]: DEBUG oslo_vmware.api [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832024, 'name': ResetVM_Task, 'duration_secs': 0.10974} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.156668] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Did hard reboot of VM {{(pid=62914) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 878.156991] env[62914]: DEBUG nova.compute.manager [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 878.157903] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb08623-c8f2-4b79-90a7-613b61c4a1f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.259401] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527cc9b4-d074-a988-052b-5877282a4e33, 'name': SearchDatastore_Task, 'duration_secs': 0.014915} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.260147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.260592] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.261062] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.263118] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.263118] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} 
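The records around this point follow oslo.vmware's task pattern: each vCenter call (SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task and so on) returns a task reference, the caller blocks in wait_for_task, and a polling loop keeps logging "progress is N%" until the task finishes, which is what the repeated _poll_task lines at api.py:434/444 are. Below is a minimal, self-contained sketch of that poll-until-done shape; fetch_task_info() and the fake progress values are illustrative stand-ins, not the oslo.vmware API itself.

import time

# Conceptual sketch of the poll-until-done loop behind the
# "Waiting for the task ... to complete" and "progress is N%" records.
# fetch_task_info() is a stand-in for reading TaskInfo from vCenter and
# is faked here so the example runs on its own.

_FAKE_PROGRESS = iter([0, 25, 66, 99, 100])

def fetch_task_info(task_ref):
    """Pretend to read the task state and progress for task_ref."""
    progress = next(_FAKE_PROGRESS)
    return {"state": "success" if progress == 100 else "running",
            "progress": progress}

def wait_for_task(task_ref, poll_interval=0.5):
    """Block until the task succeeds; raise if it reports an error."""
    while True:
        info = fetch_task_info(task_ref)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("Task %s failed" % task_ref)
        # Same shape as the log records: report progress, then poll again.
        print("Task %s progress is %d%%" % (task_ref, info["progress"]))
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task("task-demo", poll_interval=0.1)

The real loop lives in oslo_vmware.api (the wait_for_task and _poll_task references in these records); the sketch only mirrors its shape.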
[ 878.263118] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bf42d96-4591-4bc6-9ff9-0d095341222b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.274674] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.274875] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 878.276035] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ec6fec-496e-49e0-8ef5-c94f945af94e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.282239] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 878.282239] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52123f71-2bb4-66e8-499f-f39f7be43145" [ 878.282239] env[62914]: _type = "Task" [ 878.282239] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.292954] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52123f71-2bb4-66e8-499f-f39f7be43145, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.405195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.405725] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.406579] env[62914]: DEBUG nova.network.neutron [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.488606] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.610032] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fc3f4ec-9486-457a-aa9d-11af40059558 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.621736] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf73532-b4b0-4292-b968-8d68fdc844a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.664085] env[62914]: DEBUG nova.compute.manager [req-543111dd-d4df-41f5-9654-9a15a32d7617 req-33ab5ea5-5ee9-4e44-baa0-80c048303727 service nova] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Detach interface failed, port_id=0acbfa32-adde-4a6e-bfb6-c745a0b8524a, reason: Instance 2d48056c-d38f-4be1-b28b-71da14607870 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 878.670620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1004fa6a-c8d4-4377-8cc9-b8f9b0986214 tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.240s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.794855] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52123f71-2bb4-66e8-499f-f39f7be43145, 'name': SearchDatastore_Task, 'duration_secs': 0.027207} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.795739] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-859feabe-6548-444b-a23b-6cb2483b8571 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.801763] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 878.801763] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c17aba-4457-35a0-70f0-0386830245f2" [ 878.801763] env[62914]: _type = "Task" [ 878.801763] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.813163] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c17aba-4457-35a0-70f0-0386830245f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.964059] env[62914]: DEBUG nova.network.neutron [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.993789] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.029555] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df673fc-ff31-47d2-be9f-b1050346b506 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.038448] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be994f6-37be-40af-a8b2-d4312b955e5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.075484] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f954e9-8bdc-489d-90cc-1a02000cf701 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.087961] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab5026c-6756-49d1-8389-44339f9e78a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.105042] env[62914]: DEBUG nova.compute.provider_tree [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.163131] env[62914]: DEBUG nova.network.neutron [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.316029] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c17aba-4457-35a0-70f0-0386830245f2, 'name': SearchDatastore_Task, 'duration_secs': 0.023323} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.316029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.316029] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f/d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 879.316029] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7175d06-80fa-4d86-9925-7404c925e6ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.326016] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 879.326016] env[62914]: value = "task-4832025" [ 879.326016] env[62914]: _type = "Task" [ 879.326016] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.333659] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.493876] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.608829] env[62914]: DEBUG nova.scheduler.client.report [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 879.664521] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.665379] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Instance network_info: |[{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 879.668172] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:22:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dc57e52-6e86-4d59-bf3e-c46b60446825', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.675571] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Creating folder: Project (23c2dfbda62544b8bdba7832e31a0f27). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 879.676872] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c289418-40e8-44cd-a7f7-795458fffe3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.690145] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Created folder: Project (23c2dfbda62544b8bdba7832e31a0f27) in parent group-v941773. [ 879.691990] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Creating folder: Instances. Parent ref: group-v941966. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 879.691990] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7919fc43-40b5-4950-b7b9-40a499f0619c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.706572] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Created folder: Instances in parent group-v941966. [ 879.707420] env[62914]: DEBUG oslo.service.loopingcall [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.707708] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 879.707966] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83ff39e7-02a0-42b5-bec1-9c84a2bbe072 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.732809] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.732809] env[62914]: value = "task-4832028" [ 879.732809] env[62914]: _type = "Task" [ 879.732809] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.743913] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832028, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.796792] env[62914]: DEBUG nova.compute.manager [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received event network-vif-plugged-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 879.797042] env[62914]: DEBUG oslo_concurrency.lockutils [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] Acquiring lock "cca4bbf9-8864-4805-b95e-954e6b570eae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.797379] env[62914]: DEBUG oslo_concurrency.lockutils [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.797593] env[62914]: DEBUG oslo_concurrency.lockutils [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.797791] env[62914]: DEBUG nova.compute.manager [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] No waiting events found dispatching network-vif-plugged-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 879.797989] env[62914]: WARNING nova.compute.manager [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received unexpected event network-vif-plugged-3dc57e52-6e86-4d59-bf3e-c46b60446825 for instance with vm_state building and task_state spawning. [ 879.798174] env[62914]: DEBUG nova.compute.manager [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 879.798334] env[62914]: DEBUG nova.compute.manager [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing instance network info cache due to event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 879.798533] env[62914]: DEBUG oslo_concurrency.lockutils [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.798706] env[62914]: DEBUG oslo_concurrency.lockutils [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.798871] env[62914]: DEBUG nova.network.neutron [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 879.837840] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832025, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.991020] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.119544] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.120310] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 880.123142] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 30.567s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.243400] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832028, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.337403] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559488} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.337768] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f/d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 880.340278] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.340278] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-467a5cb9-d4b6-4fb8-8227-f66092062834 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.351062] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 880.351062] env[62914]: value = "task-4832029" [ 880.351062] env[62914]: _type = "Task" [ 880.351062] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.362828] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.491315] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.555292] env[62914]: DEBUG nova.network.neutron [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updated VIF entry in instance network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 880.555292] env[62914]: DEBUG nova.network.neutron [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.626739] env[62914]: DEBUG nova.compute.utils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.628429] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 880.630090] env[62914]: DEBUG nova.network.neutron [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 880.634493] env[62914]: INFO nova.compute.claims [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.703477] env[62914]: DEBUG nova.policy [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5e4285d82cd420bb797f2fadf6b034d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ffdaa966ecb4979845fda7778c7fb45', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 880.745194] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832028, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.759174] env[62914]: DEBUG nova.compute.manager [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Received event network-changed-aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 880.759400] env[62914]: DEBUG nova.compute.manager [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Refreshing instance network info cache due to event network-changed-aac4511b-c3e4-44ce-8c77-1400d0526d22. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 880.759631] env[62914]: DEBUG oslo_concurrency.lockutils [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] Acquiring lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.759783] env[62914]: DEBUG oslo_concurrency.lockutils [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] Acquired lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.760183] env[62914]: DEBUG nova.network.neutron [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Refreshing network info cache for port aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 880.863297] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08332} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.864035] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.864433] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd8c046-7079-4ed9-a409-237b6f56cc21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.895364] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f/d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.896284] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe164a6b-1b2c-4919-9afd-3c83d83e2d1f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.923397] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 880.923397] env[62914]: value = "task-4832030" [ 880.923397] env[62914]: _type = "Task" [ 880.923397] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.935943] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832030, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.993175] env[62914]: DEBUG oslo_vmware.api [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832023, 'name': PowerOnVM_Task, 'duration_secs': 3.275714} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.993502] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 880.993987] env[62914]: INFO nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Took 13.61 seconds to spawn the instance on the hypervisor. [ 880.994179] env[62914]: DEBUG nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 880.999354] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52ec8c1-ac8c-4155-b079-40b0d5028f0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.057914] env[62914]: DEBUG oslo_concurrency.lockutils [req-99473baf-bf14-417d-8f92-3600e4d8a806 req-5e19d9ad-b634-4643-8375-8bc908c0121a service nova] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.096122] env[62914]: DEBUG nova.network.neutron [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Successfully created port: 00706251-f634-4dcb-9705-105152de241f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.139521] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 881.144929] env[62914]: INFO nova.compute.resource_tracker [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating resource usage from migration 8e667dab-3203-4a05-bc45-d9153ad9cc64 [ 881.245856] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832028, 'name': CreateVM_Task, 'duration_secs': 1.084939} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.248569] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 881.249643] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.249747] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.250072] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.250682] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fe1e1e5-039f-42de-8ab0-d0a917973c40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.256071] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 881.256071] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521a6e39-75c4-1312-c5a6-f795370374a7" [ 881.256071] env[62914]: _type = "Task" [ 881.256071] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.270254] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521a6e39-75c4-1312-c5a6-f795370374a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.436014] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832030, 'name': ReconfigVM_Task, 'duration_secs': 0.354042} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.439372] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Reconfigured VM instance instance-00000045 to attach disk [datastore2] d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f/d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.442901] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8899f1f1-7380-4bf4-96c1-4a0097a076f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.453633] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 881.453633] env[62914]: value = "task-4832031" [ 881.453633] env[62914]: _type = "Task" [ 881.453633] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.462327] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832031, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.525115] env[62914]: INFO nova.compute.manager [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Took 47.16 seconds to build instance. 
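The "Acquiring lock ... by ...", "acquired ... :: waited N.NNNs" and ""released" ... :: held N.NNNs" records here and throughout come from oslo.concurrency's lockutils wrapper (the inner frames at lockutils.py:402/407/421), which serializes work on a named lock and reports how long callers waited for it and how long it was held. The following is a minimal sketch of guarding a critical section the same way, assuming the oslo.concurrency package is installed; the lock name and the function below are illustrative, not Nova's actual code.

# Minimal sketch of the lock bookkeeping pattern seen in the
# "Acquiring lock ... / acquired ... waited / released ... held" records,
# assuming the oslo.concurrency package is installed. The lock name and
# the function below are illustrative, not Nova's actual code.
from oslo_concurrency import lockutils

@lockutils.synchronized("455965de-816d-4ab2-9d5e-a12b06893e6f")
def do_build_and_run_instance(instance_uuid):
    # Only one caller at a time runs this body for the named lock; the
    # time a concurrent caller spends blocked here is the "waited N.NNNs"
    # figure, and the time the body takes is the "held N.NNNs" figure
    # reported when the lock is released.
    print("building %s" % instance_uuid)

if __name__ == "__main__":
    do_build_and_run_instance("455965de-816d-4ab2-9d5e-a12b06893e6f")

The records that follow show the same pattern in use: per-instance locks taken by _locked_do_build_and_run_instance and refresh_cache-<uuid> locks taken while the network info cache is rebuilt, so concurrent operations against one instance do not interleave.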
[ 881.557646] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.558468] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "aedc785f-619f-4b9f-850f-790f84e57577" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.733747] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5339985f-f24c-469d-a990-d6f15dea4030 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.743967] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a0b2af-fbf1-4eb3-bd68-bb00c239496b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.785141] env[62914]: DEBUG nova.network.neutron [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updated VIF entry in instance network info cache for port aac4511b-c3e4-44ce-8c77-1400d0526d22. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 881.785556] env[62914]: DEBUG nova.network.neutron [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [{"id": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "address": "fa:16:3e:ad:a6:42", "network": {"id": "7e3a1d4e-0bcc-42e4-9115-93a50e094f9e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-217011003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cda9ee54ad14f479838a54276dac349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac4511b-c3", "ovs_interfaceid": "aac4511b-c3e4-44ce-8c77-1400d0526d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.790605] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ab8c4a-3a2b-4a9d-a146-5df9076e8dfa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.804377] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521a6e39-75c4-1312-c5a6-f795370374a7, 'name': SearchDatastore_Task, 'duration_secs': 0.07113} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.805744] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ef89f4-2493-4673-a75f-5b83343ba3ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.812310] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.812601] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.812874] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.813043] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.813235] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.813834] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7824a4d2-ee36-48aa-bcc6-bd65c44b052b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.828041] env[62914]: DEBUG nova.compute.provider_tree [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.830546] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.830767] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None 
req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 881.831902] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeafc11f-f9c1-4c77-bc38-379611324ec3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.834613] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.834869] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.835095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "c488ba7b-68cc-4876-934f-a11d33fca6ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.835557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.835557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.838326] env[62914]: INFO nova.compute.manager [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Terminating instance [ 881.840417] env[62914]: DEBUG nova.compute.manager [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 881.840664] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 881.841533] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0904d6c3-d76c-4b5f-9143-e9e50cfa8aa6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.846543] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 881.846543] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529bcf9a-6f3f-3d79-5230-da252e5c20dc" [ 881.846543] env[62914]: _type = "Task" [ 881.846543] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.852761] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 881.853424] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-756b54c1-e070-4fa1-884e-3ccf35384809 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.858534] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529bcf9a-6f3f-3d79-5230-da252e5c20dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.863652] env[62914]: DEBUG oslo_vmware.api [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 881.863652] env[62914]: value = "task-4832032" [ 881.863652] env[62914]: _type = "Task" [ 881.863652] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.872894] env[62914]: DEBUG oslo_vmware.api [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.964801] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832031, 'name': Rename_Task, 'duration_secs': 0.248737} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.965268] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 881.965673] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a2e1fac-84ad-4e40-9dc1-29f8b327b207 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.975700] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 881.975700] env[62914]: value = "task-4832033" [ 881.975700] env[62914]: _type = "Task" [ 881.975700] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.986407] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.027585] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70739561-06ab-460e-9777-5cf164245f1e tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.946s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.152217] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 882.295637] env[62914]: DEBUG oslo_concurrency.lockutils [req-556b1872-c819-477c-ba4d-e3f91d6490b7 req-f2aeddb2-bff0-4704-b72e-041b2d4207bf service nova] Releasing lock "refresh_cache-c488ba7b-68cc-4876-934f-a11d33fca6ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.333667] env[62914]: DEBUG nova.scheduler.client.report [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.358782] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529bcf9a-6f3f-3d79-5230-da252e5c20dc, 'name': SearchDatastore_Task, 'duration_secs': 0.020843} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.359659] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41d12e88-0679-445b-b48e-01bcfec50f89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.369816] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 882.369816] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291a1e3-8271-2246-ecab-5248507416cd" [ 882.369816] env[62914]: _type = "Task" [ 882.369816] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.377693] env[62914]: DEBUG oslo_vmware.api [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832032, 'name': PowerOffVM_Task, 'duration_secs': 0.275003} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.379100] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 882.379436] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 882.379706] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f2ed413-3365-4c32-8b5e-30dbee61e6bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.385864] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291a1e3-8271-2246-ecab-5248507416cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.462220] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 882.462220] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 882.462220] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Deleting the datastore file [datastore2] c488ba7b-68cc-4876-934f-a11d33fca6ab {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.462220] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1e577b1-c445-4022-b621-1727703a9a9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.469552] env[62914]: DEBUG oslo_vmware.api [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 882.469552] env[62914]: value = "task-4832035" [ 882.469552] env[62914]: _type = "Task" [ 882.469552] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.481783] env[62914]: DEBUG oslo_vmware.api [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832035, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.492350] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832033, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.526466] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.526466] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.526664] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.526824] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.526878] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.527033] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.527254] env[62914]: DEBUG 
nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.527419] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.527662] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.527896] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.528136] env[62914]: DEBUG nova.virt.hardware [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.529244] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456f6d80-5b66-40b3-bb7f-8a3172092398 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.532505] env[62914]: DEBUG nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 882.544629] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6558defc-f1f3-44d4-af7d-d0e1db4d3432 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.840204] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.717s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.840433] env[62914]: INFO nova.compute.manager [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Migrating [ 882.847589] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.935s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.849481] env[62914]: INFO nova.compute.claims [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.883670] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291a1e3-8271-2246-ecab-5248507416cd, 'name': SearchDatastore_Task, 'duration_secs': 0.034379} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.884607] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.884891] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] cca4bbf9-8864-4805-b95e-954e6b570eae/cca4bbf9-8864-4805-b95e-954e6b570eae.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 882.885191] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-638db99d-f9ea-41d0-bfbb-0b69e0352fca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.893587] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 882.893587] env[62914]: value = "task-4832036" [ 882.893587] env[62914]: _type = "Task" [ 882.893587] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.899120] env[62914]: DEBUG nova.compute.manager [req-99d98e42-f31b-45e3-bd3b-eba9778453cc req-03234e34-9ade-412a-b035-48d66a9256d6 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-vif-plugged-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 882.899322] env[62914]: DEBUG oslo_concurrency.lockutils [req-99d98e42-f31b-45e3-bd3b-eba9778453cc req-03234e34-9ade-412a-b035-48d66a9256d6 service nova] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.899500] env[62914]: DEBUG oslo_concurrency.lockutils [req-99d98e42-f31b-45e3-bd3b-eba9778453cc req-03234e34-9ade-412a-b035-48d66a9256d6 service nova] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.899676] env[62914]: DEBUG oslo_concurrency.lockutils [req-99d98e42-f31b-45e3-bd3b-eba9778453cc req-03234e34-9ade-412a-b035-48d66a9256d6 service nova] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.899848] env[62914]: DEBUG nova.compute.manager [req-99d98e42-f31b-45e3-bd3b-eba9778453cc 
req-03234e34-9ade-412a-b035-48d66a9256d6 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] No waiting events found dispatching network-vif-plugged-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.900161] env[62914]: WARNING nova.compute.manager [req-99d98e42-f31b-45e3-bd3b-eba9778453cc req-03234e34-9ade-412a-b035-48d66a9256d6 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received unexpected event network-vif-plugged-00706251-f634-4dcb-9705-105152de241f for instance with vm_state building and task_state spawning. [ 882.908344] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832036, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.981425] env[62914]: DEBUG oslo_vmware.api [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300175} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.981425] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.981693] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 882.981826] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 882.982015] env[62914]: INFO nova.compute.manager [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Took 1.14 seconds to destroy the instance on the hypervisor. [ 882.982272] env[62914]: DEBUG oslo.service.loopingcall [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.982544] env[62914]: DEBUG nova.compute.manager [-] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 882.982644] env[62914]: DEBUG nova.network.neutron [-] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 882.994070] env[62914]: DEBUG oslo_vmware.api [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832033, 'name': PowerOnVM_Task, 'duration_secs': 0.574778} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.994070] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 882.994070] env[62914]: INFO nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Took 8.39 seconds to spawn the instance on the hypervisor. [ 882.994070] env[62914]: DEBUG nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 882.994806] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e0a488-b06f-4811-8b0e-c40a543822f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.060932] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.173872] env[62914]: DEBUG nova.network.neutron [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Successfully updated port: 00706251-f634-4dcb-9705-105152de241f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.307858] env[62914]: DEBUG nova.compute.manager [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-changed-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 883.308103] env[62914]: DEBUG nova.compute.manager [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] 
Refreshing instance network info cache due to event network-changed-00706251-f634-4dcb-9705-105152de241f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 883.308307] env[62914]: DEBUG oslo_concurrency.lockutils [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.308683] env[62914]: DEBUG oslo_concurrency.lockutils [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.308782] env[62914]: DEBUG nova.network.neutron [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Refreshing network info cache for port 00706251-f634-4dcb-9705-105152de241f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 883.362489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.362684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.362900] env[62914]: DEBUG nova.network.neutron [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.417016] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832036, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.515947] env[62914]: INFO nova.compute.manager [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Took 43.05 seconds to build instance. 
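The DEBUG lines above show the per-instance cache-refresh pattern: an event handler takes the named lock "refresh_cache-<instance uuid>", rebuilds the Neutron network info for that instance, and releases the lock, which is what produces the paired "Acquiring lock" / "Acquired lock" / "Releasing lock" messages from oslo_concurrency.lockutils. A minimal Python sketch of that pattern follows; the helper name and the refresh callback are illustrative assumptions, not Nova's actual code.

from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid, refresh_fn):
    # Serialize network-info-cache refreshes for a single instance. The lock
    # name mirrors the "refresh_cache-<uuid>" names seen in the log; lockutils
    # emits the Acquiring/Acquired/Releasing DEBUG lines around this block.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # refresh_fn is a placeholder for whatever rebuilds the cache entry
        # (in the log this is the Neutron network-info lookup).
        return refresh_fn(instance_uuid)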
[ 883.563132] env[62914]: DEBUG nova.compute.manager [req-808b2566-661d-4023-8f7d-6ca2cc36bf19 req-d8302d5c-f67b-4dab-86e6-177278392eb1 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Received event network-vif-deleted-aac4511b-c3e4-44ce-8c77-1400d0526d22 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 883.563475] env[62914]: INFO nova.compute.manager [req-808b2566-661d-4023-8f7d-6ca2cc36bf19 req-d8302d5c-f67b-4dab-86e6-177278392eb1 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Neutron deleted interface aac4511b-c3e4-44ce-8c77-1400d0526d22; detaching it from the instance and deleting it from the info cache [ 883.563876] env[62914]: DEBUG nova.network.neutron [req-808b2566-661d-4023-8f7d-6ca2cc36bf19 req-d8302d5c-f67b-4dab-86e6-177278392eb1 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.571771] env[62914]: DEBUG nova.compute.manager [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 883.572999] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c866a40-bcbb-420b-95e3-a88100487e4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.681026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.862975] env[62914]: DEBUG nova.network.neutron [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.908514] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.93161} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.908835] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] cca4bbf9-8864-4805-b95e-954e6b570eae/cca4bbf9-8864-4805-b95e-954e6b570eae.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 883.909083] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.909708] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90325897-7533-45c6-9f06-7ef2a86dc6eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.922832] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 883.922832] env[62914]: value = "task-4832037" [ 883.922832] env[62914]: _type = "Task" [ 883.922832] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.932058] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832037, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.942609] env[62914]: DEBUG nova.network.neutron [-] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.018917] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ab7dd62a-6aba-45d2-995b-0f6cd309130e tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.613s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.039323] env[62914]: DEBUG nova.network.neutron [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.066075] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-896c58aa-f886-4eef-aeee-54a761e205ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.080286] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70f7eab-db3e-4aa3-96d8-a32213afdb3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.103250] env[62914]: INFO nova.compute.manager [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] instance snapshotting [ 884.107284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9718d814-9657-48a8-9b6a-34eb5a2ba806 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.152927] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d47dea8-4c0b-4b1d-839a-3d2dc9997f0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.157383] env[62914]: DEBUG nova.compute.manager [req-808b2566-661d-4023-8f7d-6ca2cc36bf19 req-d8302d5c-f67b-4dab-86e6-177278392eb1 service nova] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Detach interface failed, port_id=aac4511b-c3e4-44ce-8c77-1400d0526d22, reason: Instance c488ba7b-68cc-4876-934f-a11d33fca6ab could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 884.206748] env[62914]: DEBUG nova.network.neutron [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [{"id": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "address": "fa:16:3e:72:0c:2d", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7fd222f-01", "ovs_interfaceid": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.435105] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.169006} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.435432] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.436337] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7134d877-07b9-4ec2-a84d-5266b141a69c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.454289] env[62914]: INFO nova.compute.manager [-] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Took 1.47 seconds to deallocate network for instance. 
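Most entries in this section follow the same vCenter task lifecycle: a SOAP method ending in _Task is invoked, the API returns a task reference, and oslo.vmware polls it ("Waiting for the task ... progress is N% ... completed successfully") until it finishes. A rough sketch of that flow using oslo.vmware's session API is below; the host, credentials, poll settings and the PowerOffVM_Task example are placeholder assumptions, not values taken from this deployment.

from oslo_vmware import api as vmware_api


def power_off_vm(host, user, password, vm_ref):
    # Placeholder session setup; a real service reuses one long-lived session.
    session = vmware_api.VMwareAPISession(
        host, user, password,
        10,    # api_retry_count: retries on transient API faults
        0.5,   # task_poll_interval: seconds between _poll_task iterations
    )
    # invoke_api() issues the SOAP call (the "Invoking
    # VirtualMachine.PowerOffVM_Task" lines in the log) and returns a task
    # reference ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... which wait_for_task() polls until it reaches a terminal state,
    # raising an exception if the task ends in error.
    session.wait_for_task(task)

The same polling machinery underlies the CopyVirtualDisk_Task, DeleteDatastoreFile_Task and PowerOnVM_Task waits recorded above.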
[ 884.463548] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] cca4bbf9-8864-4805-b95e-954e6b570eae/cca4bbf9-8864-4805-b95e-954e6b570eae.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.466441] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01b1399e-026c-4d1d-bb80-1fbaab2051ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.481208] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d24c66c-00b8-4fe4-8610-d2c50408bb41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.494894] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307bf8f3-bee4-47e5-8ede-54c99d823894 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.498624] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 884.498624] env[62914]: value = "task-4832038" [ 884.498624] env[62914]: _type = "Task" [ 884.498624] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.531022] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 884.535761] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78674cda-55a6-4c4a-8802-af9f6fcf2707 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.542188] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.547120] env[62914]: DEBUG oslo_concurrency.lockutils [req-66e83b4f-01b3-41dd-bebc-7eaf7923b179 req-8ae055dc-f086-4801-a5db-54cfe7801eea service nova] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.547593] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.547768] env[62914]: DEBUG nova.network.neutron [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 884.549836] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b062cf7-2d77-4b15-a456-c88f700655f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.568029] env[62914]: DEBUG nova.compute.provider_tree [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.668375] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 884.668843] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b030f467-567e-4f1d-a749-cdbb7100dcdf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.679017] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 884.679017] env[62914]: value = "task-4832039" [ 884.679017] env[62914]: _type = "Task" [ 884.679017] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.687223] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832039, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.710147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.991948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.009972] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.061292] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.069717] env[62914]: DEBUG nova.scheduler.client.report [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 885.093966] env[62914]: DEBUG nova.network.neutron [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 885.195658] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832039, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.247522] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5239333a-c1ac-f9eb-32ff-e33c9d8f69a4/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 885.248876] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae38c4bb-7b9f-40af-9e76-380ab1692d25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.258386] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5239333a-c1ac-f9eb-32ff-e33c9d8f69a4/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 885.258591] env[62914]: ERROR oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5239333a-c1ac-f9eb-32ff-e33c9d8f69a4/disk-0.vmdk due to incomplete transfer. [ 885.258918] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c2ff1423-2eb0-4a2e-8147-60741506ca85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.269216] env[62914]: DEBUG oslo_vmware.rw_handles [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5239333a-c1ac-f9eb-32ff-e33c9d8f69a4/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 885.269454] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Uploaded image 2ebc6dd3-004e-4687-828d-5c558c734f0f to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 885.272180] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 885.273318] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-40e23d53-6497-43ef-812d-35be0df601ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.282791] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 885.282791] env[62914]: value = "task-4832040" [ 885.282791] env[62914]: _type = "Task" [ 885.282791] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.295418] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832040, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.297977] env[62914]: DEBUG nova.network.neutron [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00706251-f6", "ovs_interfaceid": "00706251-f634-4dcb-9705-105152de241f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.510907] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832038, 'name': ReconfigVM_Task, 'duration_secs': 0.719909} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.511210] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Reconfigured VM instance instance-00000046 to attach disk [datastore1] cca4bbf9-8864-4805-b95e-954e6b570eae/cca4bbf9-8864-4805-b95e-954e6b570eae.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.511916] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edd7ee75-b412-4793-bc69-404803792efb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.519359] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 885.519359] env[62914]: value = "task-4832041" [ 885.519359] env[62914]: _type = "Task" [ 885.519359] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.529407] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832041, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.550154] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.550533] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.550793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.550992] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.551236] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.553731] env[62914]: INFO nova.compute.manager [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Terminating instance [ 885.556032] env[62914]: DEBUG nova.compute.manager [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 885.556032] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 885.556870] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbc6a20-0ef6-4408-acc1-4d9aa4ea257f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.565201] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 885.565574] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a3b0696-1c40-4334-94ad-aa5ba0dbe35b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.573357] env[62914]: DEBUG oslo_vmware.api [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 885.573357] env[62914]: value = "task-4832042" [ 885.573357] env[62914]: _type = "Task" [ 885.573357] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.574390] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.574931] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 885.581498] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.668s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.583394] env[62914]: INFO nova.compute.claims [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.593398] env[62914]: DEBUG oslo_vmware.api [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.689121] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832039, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.794786] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832040, 'name': Destroy_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.801320] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.801852] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance network_info: |[{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00706251-f6", "ovs_interfaceid": "00706251-f634-4dcb-9705-105152de241f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 885.802351] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:a2:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '456bd8a2-0fb6-4b17-9d25-08e7995c5184', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00706251-f634-4dcb-9705-105152de241f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 885.810445] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating folder: Project (3ffdaa966ecb4979845fda7778c7fb45). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 885.810793] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-631f09fc-c541-433b-bc0e-885f32290742 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.821976] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Created folder: Project (3ffdaa966ecb4979845fda7778c7fb45) in parent group-v941773. [ 885.822220] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating folder: Instances. Parent ref: group-v941970. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 885.822503] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6d81db4-2256-49b5-94b1-39161216ce3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.832627] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Created folder: Instances in parent group-v941970. [ 885.832904] env[62914]: DEBUG oslo.service.loopingcall [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.833144] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 885.833371] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19c606a-ce82-470a-ac00-01bf610c2fa2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.854429] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.854429] env[62914]: value = "task-4832045" [ 885.854429] env[62914]: _type = "Task" [ 885.854429] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.862968] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832045, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.035969] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832041, 'name': Rename_Task, 'duration_secs': 0.281303} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.036409] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 886.036771] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9fef55d-04dc-49d6-928b-54002ffca2fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.045014] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 886.045014] env[62914]: value = "task-4832046" [ 886.045014] env[62914]: _type = "Task" [ 886.045014] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.057723] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832046, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.083127] env[62914]: DEBUG oslo_vmware.api [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832042, 'name': PowerOffVM_Task, 'duration_secs': 0.194644} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.083470] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 886.083655] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 886.083929] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-775b701a-c75a-490d-b7f3-7109937f0d8c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.088892] env[62914]: DEBUG nova.compute.utils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.095889] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 886.095889] env[62914]: DEBUG nova.network.neutron [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 886.158965] env[62914]: DEBUG nova.policy [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6739a790d54c98b39ff51cf254379c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd141c01c1d5848eea6ef2b831e431ba5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 886.166034] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 886.166182] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 886.166273] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Deleting the datastore file [datastore2] d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 886.166563] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea248b73-3f93-43a4-9d0d-2009905438a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.174321] env[62914]: DEBUG oslo_vmware.api [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for the task: (returnval){ [ 886.174321] env[62914]: value = "task-4832048" [ 886.174321] env[62914]: _type = "Task" [ 886.174321] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.185738] env[62914]: DEBUG oslo_vmware.api [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832048, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.191593] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832039, 'name': CreateSnapshot_Task, 'duration_secs': 1.030761} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.191928] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 886.194459] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405513e4-18c9-4f15-8813-3e6cae6faf2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.231088] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220d0fde-6c7a-49fa-bb61-40064ce1fab3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.252110] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 886.294789] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832040, 'name': Destroy_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.364973] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832045, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.480029] env[62914]: DEBUG nova.network.neutron [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Successfully created port: 23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.555985] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832046, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.594966] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 886.689067] env[62914]: DEBUG oslo_vmware.api [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Task: {'id': task-4832048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375971} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.689366] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.689815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 886.689815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 886.689979] env[62914]: INFO nova.compute.manager [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 886.690445] env[62914]: DEBUG oslo.service.loopingcall [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.690543] env[62914]: DEBUG nova.compute.manager [-] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 886.690609] env[62914]: DEBUG nova.network.neutron [-] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 886.715084] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 886.716062] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-16e70a81-ea1f-4c0e-aceb-d55ffdb84871 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.733070] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 886.733070] env[62914]: value = "task-4832049" [ 886.733070] env[62914]: _type = "Task" [ 886.733070] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.748028] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832049, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.759290] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 886.759290] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a2480c0-3135-4ee3-a02a-2a570c79ceca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.769657] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 886.769657] env[62914]: value = "task-4832050" [ 886.769657] env[62914]: _type = "Task" [ 886.769657] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.780565] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832050, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.796246] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832040, 'name': Destroy_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.871385] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832045, 'name': CreateVM_Task, 'duration_secs': 0.587435} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.871735] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 886.872545] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.872545] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.872883] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.875960] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0a2ff5b-4364-44d9-8ec8-6b7d13b43478 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.882288] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 886.882288] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d72678-1571-ceef-a01d-d5994b087610" [ 886.882288] env[62914]: _type = "Task" [ 886.882288] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.891727] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d72678-1571-ceef-a01d-d5994b087610, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.057188] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832046, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.165944] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d25271c-9392-41ae-9224-c5822f9aed46 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.178450] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9f5dda-94fe-4266-b7e8-38c53298322c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.215669] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af2bfe6-9dc6-483e-9f46-77233de81f04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.225202] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b55dcf-ebc7-4c76-b218-72500ea8c8e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.249545] env[62914]: DEBUG nova.compute.provider_tree [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.259607] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832049, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.283652] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832050, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.293596] env[62914]: DEBUG nova.compute.manager [req-21c7ea2b-36e0-4387-a73f-a3cfaea0b6ce req-b1889476-6328-44d2-b84a-c1b141fac077 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Received event network-vif-deleted-1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 887.293829] env[62914]: INFO nova.compute.manager [req-21c7ea2b-36e0-4387-a73f-a3cfaea0b6ce req-b1889476-6328-44d2-b84a-c1b141fac077 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Neutron deleted interface 1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0; detaching it from the instance and deleting it from the info cache [ 887.294460] env[62914]: DEBUG nova.network.neutron [req-21c7ea2b-36e0-4387-a73f-a3cfaea0b6ce req-b1889476-6328-44d2-b84a-c1b141fac077 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.299462] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832040, 'name': Destroy_Task, 'duration_secs': 1.704681} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.300473] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Destroyed the VM [ 887.300763] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 887.301097] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-eb92ea4f-a50d-4e85-8abf-defa785f77e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.310676] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 887.310676] env[62914]: value = "task-4832051" [ 887.310676] env[62914]: _type = "Task" [ 887.310676] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.327295] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832051, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.394221] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d72678-1571-ceef-a01d-d5994b087610, 'name': SearchDatastore_Task, 'duration_secs': 0.009978} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.394416] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.394730] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.394989] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.395228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.395437] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.395718] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c9ead23-e633-4bd5-8005-3304bb115ccd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.405723] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.405945] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 887.406745] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24bac66a-2cea-4765-a566-f50c1c7dfc07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.412759] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 887.412759] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527c000f-d019-6961-de26-e42aec7d66d5" [ 887.412759] env[62914]: _type = "Task" [ 887.412759] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.422350] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527c000f-d019-6961-de26-e42aec7d66d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.557687] env[62914]: DEBUG oslo_vmware.api [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832046, 'name': PowerOnVM_Task, 'duration_secs': 1.281805} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.558336] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 887.558336] env[62914]: INFO nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Took 10.33 seconds to spawn the instance on the hypervisor. [ 887.558534] env[62914]: DEBUG nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 887.559316] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db9e594-51aa-45ea-9f7c-59398cba3e31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.613143] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 887.637620] env[62914]: DEBUG nova.network.neutron [-] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.647780] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 887.647945] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 887.648127] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.648323] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 887.648549] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.648772] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 887.648999] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 887.649204] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 
tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 887.649386] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 887.649607] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 887.649814] env[62914]: DEBUG nova.virt.hardware [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.650772] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0baeb4-2bae-43a3-ba0c-921de0b8f6ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.661356] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3183a5-f995-4799-858f-e04dc54e5f32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.748080] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832049, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.753605] env[62914]: DEBUG nova.scheduler.client.report [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.784574] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832050, 'name': PowerOffVM_Task, 'duration_secs': 0.558446} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.785167] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 887.785364] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 887.801247] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b1dd604-1897-4d18-8ac1-17344fd86e7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.811781] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d723c615-8e0d-4f93-a7e3-f17ef0c92216 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.840901] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832051, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.859058] env[62914]: DEBUG nova.compute.manager [req-21c7ea2b-36e0-4387-a73f-a3cfaea0b6ce req-b1889476-6328-44d2-b84a-c1b141fac077 service nova] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Detach interface failed, port_id=1f51697b-7d1f-4979-85b5-8e3b0ccc5fe0, reason: Instance d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 887.924063] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527c000f-d019-6961-de26-e42aec7d66d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.924958] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1840369-27f6-4065-b403-6d66fdd48399 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.932319] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 887.932319] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52029e61-54b3-5234-4704-9a67c8082dfd" [ 887.932319] env[62914]: _type = "Task" [ 887.932319] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.941536] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52029e61-54b3-5234-4704-9a67c8082dfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.081130] env[62914]: INFO nova.compute.manager [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Took 45.22 seconds to build instance. [ 888.109520] env[62914]: DEBUG nova.network.neutron [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Successfully updated port: 23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.140312] env[62914]: INFO nova.compute.manager [-] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Took 1.45 seconds to deallocate network for instance. [ 888.147869] env[62914]: DEBUG nova.compute.manager [req-422cd7d7-9883-4de9-a7ab-4df74a817170 req-4eb1262f-59cf-4485-9ea0-3ac01338a33b service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Received event network-vif-plugged-23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 888.148354] env[62914]: DEBUG oslo_concurrency.lockutils [req-422cd7d7-9883-4de9-a7ab-4df74a817170 req-4eb1262f-59cf-4485-9ea0-3ac01338a33b service nova] Acquiring lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.148354] env[62914]: DEBUG oslo_concurrency.lockutils [req-422cd7d7-9883-4de9-a7ab-4df74a817170 req-4eb1262f-59cf-4485-9ea0-3ac01338a33b service nova] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.148531] env[62914]: DEBUG oslo_concurrency.lockutils [req-422cd7d7-9883-4de9-a7ab-4df74a817170 req-4eb1262f-59cf-4485-9ea0-3ac01338a33b service nova] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.148711] env[62914]: DEBUG nova.compute.manager [req-422cd7d7-9883-4de9-a7ab-4df74a817170 req-4eb1262f-59cf-4485-9ea0-3ac01338a33b service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] No waiting events found dispatching network-vif-plugged-23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 888.148950] env[62914]: WARNING nova.compute.manager [req-422cd7d7-9883-4de9-a7ab-4df74a817170 req-4eb1262f-59cf-4485-9ea0-3ac01338a33b service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Received unexpected 
event network-vif-plugged-23732df6-58dd-4637-9c04-c25d6b049c91 for instance with vm_state building and task_state spawning. [ 888.245472] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832049, 'name': CloneVM_Task, 'duration_secs': 1.284927} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.245581] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Created linked-clone VM from snapshot [ 888.246391] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81422a85-55c7-4245-bb43-2f3c586e12d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.255238] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Uploading image 10c30911-1fcc-4a60-90d8-e8e9e7c20b92 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 888.259075] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.259587] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 888.262254] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.009s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.262489] env[62914]: DEBUG nova.objects.instance [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lazy-loading 'resources' on Instance uuid 397c5401-a435-4170-b07d-a03488c73867 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.284084] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 888.284084] env[62914]: value = "vm-941973" [ 888.284084] env[62914]: _type = "VirtualMachine" [ 888.284084] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 888.284730] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7026ef0e-6645-4839-af2c-ada7748f8d7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.293892] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.293892] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.294046] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.294352] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.294644] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.295178] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 888.295546] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.295584] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.295807] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.296057] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.296351] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.304855] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be098bcb-0876-460b-96d0-2902b8b4ff96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.316434] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lease: (returnval){ [ 888.316434] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5244f5db-3948-dc55-16f5-e4b9f4d06ad1" [ 888.316434] env[62914]: _type = "HttpNfcLease" [ 888.316434] env[62914]: } obtained for exporting VM: (result){ [ 888.316434] env[62914]: value = "vm-941973" [ 888.316434] env[62914]: _type = "VirtualMachine" [ 888.316434] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 888.316709] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the lease: (returnval){ [ 888.316709] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5244f5db-3948-dc55-16f5-e4b9f4d06ad1" [ 888.316709] env[62914]: _type = "HttpNfcLease" [ 888.316709] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 888.323717] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 888.323717] env[62914]: value = "task-4832053" [ 888.323717] env[62914]: _type = "Task" [ 888.323717] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.329147] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 888.329147] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5244f5db-3948-dc55-16f5-e4b9f4d06ad1" [ 888.329147] env[62914]: _type = "HttpNfcLease" [ 888.329147] env[62914]: } is ready. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 888.332511] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 888.332511] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5244f5db-3948-dc55-16f5-e4b9f4d06ad1" [ 888.332511] env[62914]: _type = "HttpNfcLease" [ 888.332511] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 888.333832] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c7280a-357e-489b-8b13-c267655b45ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.340702] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832051, 'name': RemoveSnapshot_Task, 'duration_secs': 0.945771} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.342037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 888.342037] env[62914]: DEBUG nova.compute.manager [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 888.342514] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3400d8b6-14c7-4da8-bb38-2b4d05f920e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.352140] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.353056] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ff47f5-3e9d-5eb1-522e-f7eb4498d099/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 888.353056] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ff47f5-3e9d-5eb1-522e-f7eb4498d099/disk-0.vmdk for reading. 
{{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 888.443843] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52029e61-54b3-5234-4704-9a67c8082dfd, 'name': SearchDatastore_Task, 'duration_secs': 0.016029} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.444154] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.444435] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 888.444727] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81d25e43-7a5c-48ea-9813-d562c30de855 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.454318] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 888.454318] env[62914]: value = "task-4832054" [ 888.454318] env[62914]: _type = "Task" [ 888.454318] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.466128] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1a034821-b01d-476a-bf2c-b2af5414a3b5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.467972] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832054, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.584044] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fe1705-c242-47e2-ba02-8c43dda12d4c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.830s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.612543] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.612771] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.612957] env[62914]: DEBUG nova.network.neutron [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 888.654605] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.766578] env[62914]: DEBUG nova.compute.utils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.771574] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 888.771881] env[62914]: DEBUG nova.network.neutron [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 888.844222] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832053, 'name': ReconfigVM_Task, 'duration_secs': 0.312714} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.844696] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 888.923019] env[62914]: INFO nova.compute.manager [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Shelve offloading [ 888.930826] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 888.931990] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff26188b-c450-461b-82c8-c74c19fc96a9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.943192] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 888.943192] env[62914]: value = "task-4832055" [ 888.943192] env[62914]: _type = "Task" [ 888.943192] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.963552] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 888.963833] env[62914]: DEBUG nova.compute.manager [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 888.967591] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f6bca5-f5dd-4a32-b3a6-0d3609171cae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.976200] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832054, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.982544] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.982544] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.982690] env[62914]: DEBUG nova.network.neutron [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 889.058573] env[62914]: DEBUG nova.policy [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddc9958565c745e488dc7f3b34af9585', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4860bec4a28e4289b7a508f007fff452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 889.161932] env[62914]: DEBUG nova.network.neutron [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 889.281642] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 889.318080] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f87bf08-2075-4180-84fd-38ff36d24eed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.334173] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69967e40-2b24-4fc8-b9f2-ddf7055b2428 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.372762] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.373186] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 889.373433] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.373840] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.374037] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.374366] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.374535] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 889.374719] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.374964] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.375162] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.375361] env[62914]: DEBUG nova.virt.hardware [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.382255] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Reconfiguring VM instance instance-0000003c to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 889.386296] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76eef93a-67fa-4dfa-b82e-42ce5eff1fbe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.402254] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebbc595-74bf-410c-8f02-38e97586e458 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.414524] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b96773-7990-4d8a-8c01-2aafc7a64706 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.419383] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 889.419383] env[62914]: value = "task-4832056" [ 889.419383] env[62914]: _type = "Task" [ 889.419383] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.433027] env[62914]: DEBUG nova.compute.provider_tree [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.441274] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.465936] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832054, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621304} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.466601] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 889.466712] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.469367] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2f21de7-5ea1-4669-99d5-92e587e6aa6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.478119] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 889.478119] env[62914]: value = "task-4832057" [ 889.478119] env[62914]: _type = "Task" [ 889.478119] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.487267] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832057, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.531311] env[62914]: DEBUG nova.network.neutron [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updating instance_info_cache with network_info: [{"id": "23732df6-58dd-4637-9c04-c25d6b049c91", "address": "fa:16:3e:a9:92:aa", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23732df6-58", "ovs_interfaceid": "23732df6-58dd-4637-9c04-c25d6b049c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.673559] env[62914]: DEBUG nova.network.neutron [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Successfully created port: 9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.934761] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832056, 'name': ReconfigVM_Task, 'duration_secs': 0.365157} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.936592] env[62914]: DEBUG nova.scheduler.client.report [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.943918] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Reconfigured VM instance instance-0000003c to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 889.946653] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815742e9-60e0-4cfd-9098-08b1020d662f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.977800] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c/557c0538-fc4a-403a-a9cb-b706e2260b1c.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.978723] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c79fb1f-67cc-431c-bcf3-c0274d104245 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.001697] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832057, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06844} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.003158] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.003591] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 890.003591] env[62914]: value = "task-4832058" [ 890.003591] env[62914]: _type = "Task" [ 890.003591] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.004318] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b723d2f-cc42-4970-9729-af1fd922e47d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.033201] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.037326] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8299eab5-751d-4899-b1c8-933236ac69df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.055058] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.055548] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Instance network_info: |[{"id": "23732df6-58dd-4637-9c04-c25d6b049c91", "address": "fa:16:3e:a9:92:aa", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23732df6-58", "ovs_interfaceid": "23732df6-58dd-4637-9c04-c25d6b049c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 890.056850] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832058, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.057244] env[62914]: DEBUG nova.network.neutron [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.059218] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:92:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23732df6-58dd-4637-9c04-c25d6b049c91', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.070183] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating folder: Project (d141c01c1d5848eea6ef2b831e431ba5). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 890.070183] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eba3e122-b137-476e-9014-a419cbebe058 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.075571] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 890.075571] env[62914]: value = "task-4832059" [ 890.075571] env[62914]: _type = "Task" [ 890.075571] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.080846] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created folder: Project (d141c01c1d5848eea6ef2b831e431ba5) in parent group-v941773. [ 890.081140] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating folder: Instances. Parent ref: group-v941974. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 890.081993] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-325fa43e-4d95-4080-ac90-46c281e3bb94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.087888] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832059, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.097561] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created folder: Instances in parent group-v941974. [ 890.098036] env[62914]: DEBUG oslo.service.loopingcall [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.098235] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 890.098622] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d593b41-f090-44b9-a876-d12e73164431 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.121393] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.121393] env[62914]: value = "task-4832062" [ 890.121393] env[62914]: _type = "Task" [ 890.121393] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.130584] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832062, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.178457] env[62914]: DEBUG nova.compute.manager [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Received event network-changed-23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 890.178693] env[62914]: DEBUG nova.compute.manager [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Refreshing instance network info cache due to event network-changed-23732df6-58dd-4637-9c04-c25d6b049c91. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 890.178939] env[62914]: DEBUG oslo_concurrency.lockutils [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] Acquiring lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.179335] env[62914]: DEBUG oslo_concurrency.lockutils [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] Acquired lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.179335] env[62914]: DEBUG nova.network.neutron [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Refreshing network info cache for port 23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 890.289618] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 890.320178] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 890.320992] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 890.321115] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None 
req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 890.323859] env[62914]: DEBUG nova.virt.hardware [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 890.323859] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be898da4-8829-4a78-9ce1-b78808268385 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.333930] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6488f118-11de-4cc2-9c79-c8cb92958b99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.447566] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.451758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.253s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.451997] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.454478] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.259s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.454673] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.457057] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.075s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.458952] env[62914]: INFO nova.compute.claims [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.482097] env[62914]: INFO nova.scheduler.client.report [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Deleted allocations for instance 397c5401-a435-4170-b07d-a03488c73867 [ 890.492851] env[62914]: INFO nova.scheduler.client.report [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleted allocations for instance 4cea2bd1-a238-4fb6-bc47-719894461228 [ 890.507761] env[62914]: INFO nova.scheduler.client.report [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted allocations for instance 2f7bc586-af68-4d9d-81e2-8247371dfa7f [ 890.525440] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832058, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.569355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.586647] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.634088] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832062, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.996774] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13372324-46de-4d5d-b101-cb5e50647efb tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.547s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.997664] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 30.056s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.997906] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "397c5401-a435-4170-b07d-a03488c73867-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.998160] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.998339] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.002541] env[62914]: INFO nova.compute.manager [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Terminating instance [ 891.013134] env[62914]: DEBUG nova.network.neutron [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updated VIF entry in instance network info cache for port 23732df6-58dd-4637-9c04-c25d6b049c91. 
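
A few records back, nova.virt.hardware enumerates CPU topologies for the m1.nano flavor: with limits of 65536 sockets/cores/threads and 1 vCPU, the only candidate is VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified enumeration that reproduces that result (not Nova's actual algorithm, just the factorisation idea behind it):

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield every sockets*cores*threads factorisation of the vCPU count
    that stays within the per-dimension maximums."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)

# Matches "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
print(list(possible_topologies(1, 65536, 65536, 65536)))
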
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 891.013839] env[62914]: DEBUG nova.network.neutron [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updating instance_info_cache with network_info: [{"id": "23732df6-58dd-4637-9c04-c25d6b049c91", "address": "fa:16:3e:a9:92:aa", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23732df6-58", "ovs_interfaceid": "23732df6-58dd-4637-9c04-c25d6b049c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.018898] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.019087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquired lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.020730] env[62914]: DEBUG nova.network.neutron [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 891.023543] env[62914]: DEBUG oslo_concurrency.lockutils [None req-714a02d9-838d-459a-9840-be961404f249 tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "4cea2bd1-a238-4fb6-bc47-719894461228" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.951s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.037094] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ec408719-407f-4b29-903f-e15ea3d683dc tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "2f7bc586-af68-4d9d-81e2-8247371dfa7f" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.008s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.046022] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832058, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.082541] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 891.083656] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d88c0e2-9f48-4176-96aa-13a1532aa51d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.093384] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 891.097405] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9b87835-a990-4d2e-97c3-3d5350cc2b80 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.100474] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832059, 'name': ReconfigVM_Task, 'duration_secs': 0.56475} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.100474] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.101394] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9744a24-1c0f-417d-8d85-c8c7e75c51f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.108646] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 891.108646] env[62914]: value = "task-4832064" [ 891.108646] env[62914]: _type = "Task" [ 891.108646] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.120023] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832064, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.136101] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832062, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.182131] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 891.182518] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 891.182734] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleting the datastore file [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.183162] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5a20278-27e2-4182-89de-f6e2b96b9403 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.189960] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 891.189960] env[62914]: value = "task-4832065" [ 891.189960] env[62914]: _type = "Task" [ 891.189960] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.199849] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832065, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.393479] env[62914]: DEBUG nova.network.neutron [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Successfully updated port: 9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 891.532769] env[62914]: DEBUG nova.compute.utils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Can not refresh info_cache because instance was not found {{(pid=62914) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 891.535485] env[62914]: DEBUG oslo_concurrency.lockutils [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] Releasing lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.535814] env[62914]: DEBUG nova.compute.manager [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 891.536008] env[62914]: DEBUG nova.compute.manager [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing instance network info cache due to event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 891.536233] env[62914]: DEBUG oslo_concurrency.lockutils [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.536400] env[62914]: DEBUG oslo_concurrency.lockutils [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.536568] env[62914]: DEBUG nova.network.neutron [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 891.544492] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832058, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.555223] env[62914]: DEBUG nova.network.neutron [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 891.626410] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832064, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.643072] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832062, 'name': CreateVM_Task, 'duration_secs': 1.360393} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.643072] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 891.644121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.644121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.644121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 891.644403] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f009dbe3-8557-4aad-8dfa-1ad903de02ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.651285] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 891.651285] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52073f74-468a-5843-7c0d-47d6d29a2abb" [ 891.651285] env[62914]: _type = "Task" [ 891.651285] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.659052] env[62914]: DEBUG nova.network.neutron [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.664070] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52073f74-468a-5843-7c0d-47d6d29a2abb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.704051] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.896504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-83de3d7c-2308-4678-ae90-a30705f6a8c4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.896753] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-83de3d7c-2308-4678-ae90-a30705f6a8c4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.896828] env[62914]: DEBUG nova.network.neutron [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 892.001092] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7fd5b0-e3ea-48b3-a56a-8099b2222fd8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.012366] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4808ff1-928e-4743-a6e5-0c831a02e323 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.056061] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a8b3b5-63f0-48d9-a870-f61835e6ae16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.070261] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832058, 'name': ReconfigVM_Task, 'duration_secs': 1.909767} completed 
successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.070611] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c/557c0538-fc4a-403a-a9cb-b706e2260b1c.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.071035] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 892.076937] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0f9bbc-d5df-4e15-9aee-2739860fc777 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.094672] env[62914]: DEBUG nova.compute.provider_tree [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.121755] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832064, 'name': Rename_Task, 'duration_secs': 1.009941} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.122096] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 892.122351] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35a9fd42-e5ac-430f-955a-d446088cd4d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.130264] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 892.130264] env[62914]: value = "task-4832066" [ 892.130264] env[62914]: _type = "Task" [ 892.130264] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.140945] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832066, 'name': PowerOnVM_Task} progress is 0%. 
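
The vmops records show the resize progress of instance 557c0538-fc4a-403a-a9cb-b706e2260b1c being bumped to 50 here (and to 67 a little further down). One plausible accounting that yields exactly those values is "completed steps over total steps" (3/6 gives 50, 4/6 gives 67); the sketch below assumes that model rather than quoting Nova's actual step list:

def instance_progress(completed_steps, total_steps):
    """Express migration progress as a whole-number percentage."""
    return int(round(100.0 * completed_steps / total_steps))

# Assuming a six-step flow, steps 3 and 4 give the values seen in the log:
assert instance_progress(3, 6) == 50
assert instance_progress(4, 6) == 67
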
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.163458] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52073f74-468a-5843-7c0d-47d6d29a2abb, 'name': SearchDatastore_Task, 'duration_secs': 0.025417} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.164017] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.164291] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.164545] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.164705] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.164959] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.165429] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Releasing lock "refresh_cache-397c5401-a435-4170-b07d-a03488c73867" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.165805] env[62914]: DEBUG nova.compute.manager [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 892.166200] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 892.166478] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6422b220-c30f-4522-a0db-65fa04fa7bd6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.168476] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6d824a5-a5e5-435e-9f67-ff85be519d92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.181029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3a407a-265d-4d84-a829-56b4e532d5a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.198746] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.198988] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 892.204158] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df4a281b-1972-428c-88dc-d496b8882d1f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.210958] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 892.210958] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ccf288-b69c-194f-f107-1f2d3ddb67b5" [ 892.210958] env[62914]: _type = "Task" [ 892.210958] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.214985] env[62914]: DEBUG oslo_vmware.api [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.692761} completed successfully. 
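
The image-cache records in this stretch show the usual dance: take a lock on the cached image path under devstack-image-cache_base, search the datastore for it, create the cache directory if needed, and only then copy the cached VMDK into the instance directory (the CopyVirtualDisk_Task a little further down). A condensed sketch of that check-then-copy pattern with injected callables; the helper names and registry layout are illustrative, not Nova's:

import threading

_image_locks = {}            # datastore path -> lock
_lock_guard = threading.Lock()


def _lock_for(path):
    with _lock_guard:
        return _image_locks.setdefault(path, threading.Lock())


def fetch_image_if_missing(image_id, datastore, exists, mkdir, fetch):
    """Ensure the base image VMDK is present in the datastore image cache."""
    cache_dir = f"[{datastore}] devstack-image-cache_base"
    cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
    with _lock_for(cached_vmdk):          # the per-image lock in the log
        if not exists(cached_vmdk):       # SearchDatastore_Task
            mkdir(cache_dir)              # FileManager.MakeDirectory
            fetch(image_id, cached_vmdk)  # download into the cache
    return cached_vmdk
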
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.232678] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.233081] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 892.233192] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.235492] env[62914]: WARNING nova.virt.vmwareapi.vmops [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 397c5401-a435-4170-b07d-a03488c73867 could not be found. [ 892.236361] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.236361] env[62914]: INFO nova.compute.manager [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Took 0.07 seconds to destroy the instance on the hypervisor. [ 892.236361] env[62914]: DEBUG oslo.service.loopingcall [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.239330] env[62914]: DEBUG nova.compute.manager [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 892.239442] env[62914]: DEBUG nova.network.neutron [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.248132] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ccf288-b69c-194f-f107-1f2d3ddb67b5, 'name': SearchDatastore_Task, 'duration_secs': 0.022827} completed successfully. 
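
The oslo.service.loopingcall record above wraps network deallocation in a retried call (_deallocate_network_with_retries). A plain-Python approximation of a bounded fixed-interval retry loop; the attempt count and interval are assumed, and the real code uses oslo_service.loopingcall rather than this helper:

import time


def call_with_retries(func, attempts=3, interval=2.0):
    """Call func(); on failure, wait and retry a bounded number of times."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(interval)

# usage sketch:
# call_with_retries(lambda: deallocate_for_instance(context, instance))
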
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.249057] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a5577f9-62d4-45e0-a0c7-15c5e105e36c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.257065] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 892.257065] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52845a30-9317-836c-6df8-b3a411459b27" [ 892.257065] env[62914]: _type = "Task" [ 892.257065] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.264091] env[62914]: INFO nova.scheduler.client.report [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted allocations for instance dc99b470-4334-408d-8853-d2e9b9204d04 [ 892.270123] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52845a30-9317-836c-6df8-b3a411459b27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.271229] env[62914]: DEBUG nova.network.neutron [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.359506] env[62914]: DEBUG nova.network.neutron [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updated VIF entry in instance network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 892.360019] env[62914]: DEBUG nova.network.neutron [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.434265] env[62914]: DEBUG nova.network.neutron [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Instance cache missing network info. 
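
The instance_info_cache payloads logged here are lists of VIF dicts. A small helper showing how the fixed and floating addresses above (192.168.128.13 and 10.180.180.251 for port 3dc57e52-6e86-4d59-bf3e-c46b60446825) can be pulled out of that structure; purely illustrative, not a Nova API:

def addresses_from_network_info(network_info):
    """Return (fixed_ips, floating_ips) found in a network_info list."""
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return fixed, floating
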
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.585709] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61eea4c-5340-4a79-a152-c9eef16a50eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.607789] env[62914]: DEBUG nova.scheduler.client.report [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 892.619543] env[62914]: DEBUG nova.network.neutron [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Updating instance_info_cache with network_info: [{"id": "9be7ced3-94cd-47b2-8902-92429978705c", "address": "fa:16:3e:00:67:de", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9be7ced3-94", "ovs_interfaceid": "9be7ced3-94cd-47b2-8902-92429978705c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.619543] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66036e3-c5e5-43fc-8986-e213fe54685b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.645610] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 892.660473] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 
tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832066, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.768900] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52845a30-9317-836c-6df8-b3a411459b27, 'name': SearchDatastore_Task, 'duration_secs': 0.025401} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.768900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.769183] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6/7aa4401b-60e5-41b8-b4de-b4fb5ab799c6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 892.769496] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65f6b806-ab02-4c14-bc4f-a8d440acd50f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.772376] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.773144] env[62914]: DEBUG nova.network.neutron [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.778830] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 892.778830] env[62914]: value = "task-4832067" [ 892.778830] env[62914]: _type = "Task" [ 892.778830] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.788727] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832067, 'name': CopyVirtualDisk_Task} progress is 0%. 
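
The scheduler report-client record just above skips an update because the provider's inventory matches what is already recorded; the guard reduces to a dict comparison over the per-resource-class inventories. A sketch of that check, reusing the VCPU inventory from the log (update_placement is a stand-in callable, not the report client's method name):

def maybe_update_inventory(provider_uuid, current, desired, update_placement):
    """Only push inventory to placement when something actually changed."""
    if current == desired:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    update_placement(provider_uuid, desired)
    return True


vcpu_inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
}
maybe_update_inventory("f2f7a014-852b-4b37-9610-c5761f4b0175",
                       vcpu_inventory, vcpu_inventory,
                       update_placement=lambda *args: None)
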
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.862551] env[62914]: DEBUG oslo_concurrency.lockutils [req-97107511-15a1-4fe5-94bd-50e0026897d4 req-73143256-8fc9-4642-a916-adf95c9b7266 service nova] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.939101] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-vif-unplugged-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 892.939440] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.939552] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.939732] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.939910] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] No waiting events found dispatching network-vif-unplugged-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 892.940105] env[62914]: WARNING nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received unexpected event network-vif-unplugged-c3221de3-00d5-45e7-af68-04297360fbcf for instance with vm_state shelved_offloaded and task_state None. [ 892.940285] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 892.940449] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing instance network info cache due to event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 892.940704] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.940819] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.941018] env[62914]: DEBUG nova.network.neutron [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 893.075819] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.076201] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.119309] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-83de3d7c-2308-4678-ae90-a30705f6a8c4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.120033] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Instance network_info: |[{"id": "9be7ced3-94cd-47b2-8902-92429978705c", "address": "fa:16:3e:00:67:de", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": 
"nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9be7ced3-94", "ovs_interfaceid": "9be7ced3-94cd-47b2-8902-92429978705c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 893.120375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.120998] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 893.124470] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:67:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9be7ced3-94cd-47b2-8902-92429978705c', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 893.132597] env[62914]: DEBUG oslo.service.loopingcall [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.132894] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.420s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.133133] env[62914]: DEBUG nova.objects.instance [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lazy-loading 'resources' on Instance uuid 32e8f18e-2116-43bd-9951-ad809ab95ba2 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.134807] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 893.135223] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca5f539b-1843-4de1-80ca-14e504e93f72 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.164078] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 893.164078] env[62914]: value = "task-4832068" [ 893.164078] env[62914]: _type = "Task" [ 893.164078] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.170266] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832066, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.179700] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832068, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.204059] env[62914]: DEBUG nova.network.neutron [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Port e7fd222f-0127-4616-8d16-801a8c35a0d1 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 893.276051] env[62914]: INFO nova.compute.manager [-] [instance: 397c5401-a435-4170-b07d-a03488c73867] Took 1.04 seconds to deallocate network for instance. [ 893.295949] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832067, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.330040] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "7d8287f9-10be-4834-8b7a-1b764145d1c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.330411] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.330957] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "7d8287f9-10be-4834-8b7a-1b764145d1c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.331192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.331659] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.334930] env[62914]: INFO nova.compute.manager [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Terminating instance [ 893.337425] env[62914]: DEBUG nova.compute.manager [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 893.337763] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.338745] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ed1a22-d7c8-46b3-afd8-7e10aad6ef51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.348916] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 893.349271] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32b34e38-cebc-4b9c-a8e2-1c2867b82a89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.360138] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 893.360138] env[62914]: value = "task-4832069" [ 893.360138] env[62914]: _type = "Task" [ 893.360138] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.373798] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4832069, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.481956] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "aede8da7-8bf2-4963-b08b-6e06007614a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.482270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.482483] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.482728] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.482935] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.490186] env[62914]: INFO nova.compute.manager [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Terminating instance [ 893.493760] env[62914]: DEBUG nova.compute.manager [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 893.493865] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.494753] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85a1b5e-94f7-4934-845d-feefc9c71f1a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.503988] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 893.504174] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a757206c-d3e0-420e-8308-f3a6a0feae86 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.512952] env[62914]: DEBUG oslo_vmware.api [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 893.512952] env[62914]: value = "task-4832070" [ 893.512952] env[62914]: _type = "Task" [ 893.512952] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.525575] env[62914]: DEBUG oslo_vmware.api [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4832070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.578510] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 893.636941] env[62914]: DEBUG nova.compute.utils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.642459] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 893.642661] env[62914]: DEBUG nova.network.neutron [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 893.670204] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832066, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.684394] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832068, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.776222] env[62914]: DEBUG nova.policy [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4f1342629ac4aee802a2b69a5459827', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ae1b7abf6f24eccb2b44d82687deb76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 893.788602] env[62914]: INFO nova.compute.manager [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance disappeared during terminate [ 893.788602] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5be9255c-cd11-4c94-8dad-1ea78d343069 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "397c5401-a435-4170-b07d-a03488c73867" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.791s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.794133] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.804768} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.794819] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6/7aa4401b-60e5-41b8-b4de-b4fb5ab799c6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 893.795242] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.795897] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-489f2b73-e2b2-4ea5-b79d-46c58dd85555 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.808830] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 893.808830] env[62914]: value = "task-4832071" [ 893.808830] env[62914]: _type = "Task" [ 893.808830] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.827024] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.881459] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4832069, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.907755] env[62914]: DEBUG nova.network.neutron [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updated VIF entry in instance network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 893.908226] env[62914]: DEBUG nova.network.neutron [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc3221de3-00", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.023713] env[62914]: DEBUG oslo_vmware.api [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4832070, 'name': PowerOffVM_Task, 'duration_secs': 0.238029} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.024085] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 894.024442] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 894.024676] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11ef1f92-17c2-4f8c-90bf-0ce96d10ac65 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.096278] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 894.096278] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 894.096278] env[62914]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleting the datastore file [datastore2] aede8da7-8bf2-4963-b08b-6e06007614a5 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.096781] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f29d218-2bf4-436b-b3ad-0ec9ed3e708a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.104893] env[62914]: DEBUG oslo_vmware.api [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for the task: (returnval){ [ 894.104893] env[62914]: value = "task-4832073" [ 894.104893] env[62914]: _type = "Task" [ 894.104893] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.109709] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.118055] env[62914]: DEBUG oslo_vmware.api [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4832073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.146636] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 894.165151] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832066, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.181708] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832068, 'name': CreateVM_Task, 'duration_secs': 0.682145} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.181818] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 894.182947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.182947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.183330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 894.183661] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecf0b6a6-8c87-41e1-83b4-2b43f1b7c78a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.191535] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 894.191535] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bb3e41-6f20-4880-78d9-9f29a5ca01f1" [ 894.191535] env[62914]: _type = "Task" [ 894.191535] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.206036] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bb3e41-6f20-4880-78d9-9f29a5ca01f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.227979] env[62914]: DEBUG nova.network.neutron [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Successfully created port: 5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.238284] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.238553] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.238748] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.263854] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa9b741-5cdd-4938-9bfc-30a215c13829 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.274090] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1371aada-db72-4b25-81e0-b1aaba290580 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.315600] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2710e9c9-eafb-4edb-afa5-9021b093b7e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.327409] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847f8b23-85f5-4624-ae27-1d1f7004b872 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.333631] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071758} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.333631] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.334108] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d72cbe-552c-4570-a1bc-a29371cd9d93 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.351380] env[62914]: DEBUG nova.compute.provider_tree [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.374997] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6/7aa4401b-60e5-41b8-b4de-b4fb5ab799c6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.380049] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51c45c49-abf1-4318-ab5e-9adbafc9d7c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.404043] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4832069, 'name': PowerOffVM_Task, 'duration_secs': 0.549284} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.404171] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 894.404289] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 894.404586] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 894.404586] env[62914]: value = "task-4832074" [ 894.404586] env[62914]: _type = "Task" [ 894.404586] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.404788] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52233266-c687-4510-a0b4-2d82cc96a041 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.411684] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.412164] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Received event network-vif-plugged-9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 894.412513] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Acquiring lock "83de3d7c-2308-4678-ae90-a30705f6a8c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.412842] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.413157] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.413473] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] No waiting events found dispatching network-vif-plugged-9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 894.413769] env[62914]: WARNING nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Received unexpected event network-vif-plugged-9be7ced3-94cd-47b2-8902-92429978705c for instance with vm_state building and task_state spawning. 
[ 894.414083] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Received event network-changed-9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 894.414382] env[62914]: DEBUG nova.compute.manager [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Refreshing instance network info cache due to event network-changed-9be7ced3-94cd-47b2-8902-92429978705c. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 894.414707] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Acquiring lock "refresh_cache-83de3d7c-2308-4678-ae90-a30705f6a8c4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.414981] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Acquired lock "refresh_cache-83de3d7c-2308-4678-ae90-a30705f6a8c4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.415259] env[62914]: DEBUG nova.network.neutron [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Refreshing network info cache for port 9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 894.421463] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832074, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.500390] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 894.500701] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 894.500997] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Deleting the datastore file [datastore1] 7d8287f9-10be-4834-8b7a-1b764145d1c3 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.503544] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3254d58-68c6-4ff4-82f4-1e6defd4cd87 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.510509] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for the task: (returnval){ [ 894.510509] env[62914]: value = "task-4832076" [ 894.510509] env[62914]: _type = "Task" [ 894.510509] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.519202] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4832076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.616885] env[62914]: DEBUG oslo_vmware.api [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Task: {'id': task-4832073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366159} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.617191] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.617784] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 894.617784] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 894.617784] env[62914]: INFO nova.compute.manager [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 894.618102] env[62914]: DEBUG oslo.service.loopingcall [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.618332] env[62914]: DEBUG nova.compute.manager [-] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 894.618478] env[62914]: DEBUG nova.network.neutron [-] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 894.665078] env[62914]: DEBUG oslo_vmware.api [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832066, 'name': PowerOnVM_Task, 'duration_secs': 2.252343} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.665815] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 894.665815] env[62914]: INFO nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Took 12.51 seconds to spawn the instance on the hypervisor. 
[ 894.665815] env[62914]: DEBUG nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 894.666610] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892f62da-1abf-49c2-8fcd-711b034e9fc2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.706502] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bb3e41-6f20-4880-78d9-9f29a5ca01f1, 'name': SearchDatastore_Task, 'duration_secs': 0.022723} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.709098] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.709371] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.709656] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.709839] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.710042] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.710637] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1f01923-e704-48f1-a856-61b260e00ccf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.724025] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 
tempest-DeleteServersTestJSON-844183300-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.724025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 894.724025] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f448cb02-fb16-451a-adff-807777163adf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.730804] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 894.730804] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c35712-c046-4aba-cebe-4e8386ab6cab" [ 894.730804] env[62914]: _type = "Task" [ 894.730804] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.741432] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c35712-c046-4aba-cebe-4e8386ab6cab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.854460] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.854706] env[62914]: DEBUG nova.scheduler.client.report [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.917802] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832074, 'name': ReconfigVM_Task, 'duration_secs': 0.456511} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.918114] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6/7aa4401b-60e5-41b8-b4de-b4fb5ab799c6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.918823] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3824571e-592e-4000-9e38-c6b8e908d0ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.927020] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 894.927020] env[62914]: value = "task-4832077" [ 894.927020] env[62914]: _type = "Task" [ 894.927020] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.939705] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832077, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.001569] env[62914]: DEBUG nova.compute.manager [req-53eadfb5-0765-4dd1-bdef-fb27f9996aff req-cd3631fd-53c9-4ea5-aedf-4bc4f2c3b6cf service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Received event network-vif-deleted-ae6db457-8035-4a28-bf52-7113144cfe11 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 895.001797] env[62914]: INFO nova.compute.manager [req-53eadfb5-0765-4dd1-bdef-fb27f9996aff req-cd3631fd-53c9-4ea5-aedf-4bc4f2c3b6cf service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Neutron deleted interface ae6db457-8035-4a28-bf52-7113144cfe11; detaching it from the instance and deleting it from the info cache [ 895.002232] env[62914]: DEBUG nova.network.neutron [req-53eadfb5-0765-4dd1-bdef-fb27f9996aff req-cd3631fd-53c9-4ea5-aedf-4bc4f2c3b6cf service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.023269] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4832076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.160816] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 895.186210] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.186484] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.186653] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.186846] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.187010] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.187172] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.187391] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.187836] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.187836] env[62914]: DEBUG nova.virt.hardware [None 
req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.188075] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.188383] env[62914]: DEBUG nova.virt.hardware [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.189228] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a1a44c-9fe8-4b54-9906-13cce15eceeb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.195092] env[62914]: INFO nova.compute.manager [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Took 46.94 seconds to build instance. [ 895.200631] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf26e936-0c9e-4b01-8363-e883ee298019 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.252026] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c35712-c046-4aba-cebe-4e8386ab6cab, 'name': SearchDatastore_Task, 'duration_secs': 0.020189} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.253166] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50075c2d-f171-4f50-822a-ebe22414cfdc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.263856] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 895.263856] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521394e8-9d91-52e1-135e-665c6fc0c7f7" [ 895.263856] env[62914]: _type = "Task" [ 895.263856] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.274809] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521394e8-9d91-52e1-135e-665c6fc0c7f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.289202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.289202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.289202] env[62914]: DEBUG nova.network.neutron [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 895.345970] env[62914]: DEBUG nova.network.neutron [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Updated VIF entry in instance network info cache for port 9be7ced3-94cd-47b2-8902-92429978705c. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 895.345970] env[62914]: DEBUG nova.network.neutron [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Updating instance_info_cache with network_info: [{"id": "9be7ced3-94cd-47b2-8902-92429978705c", "address": "fa:16:3e:00:67:de", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9be7ced3-94", "ovs_interfaceid": "9be7ced3-94cd-47b2-8902-92429978705c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.365673] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.231s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.367721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.633s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.368292] env[62914]: DEBUG nova.objects.instance [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lazy-loading 'resources' on Instance uuid bd81fcb7-abef-4b86-8dce-f07b1c226f2f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.394734] env[62914]: INFO nova.scheduler.client.report [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted allocations for instance 32e8f18e-2116-43bd-9951-ad809ab95ba2 [ 895.440184] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832077, 'name': Rename_Task, 'duration_secs': 0.400408} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.440529] env[62914]: DEBUG nova.network.neutron [-] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.442506] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 895.443100] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81cae6d1-f990-4240-9f69-a778c841344c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.456090] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 895.456090] env[62914]: value = "task-4832078" [ 895.456090] env[62914]: _type = "Task" [ 895.456090] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.466620] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832078, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.505796] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4098104-0781-440b-b105-6d0e4cda6597 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.519337] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc7ab82-fc66-46a2-9b1c-f3188fe607af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.535269] env[62914]: DEBUG oslo_vmware.api [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Task: {'id': task-4832076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.57163} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.535269] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.535269] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 895.535772] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 895.535772] env[62914]: INFO nova.compute.manager [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Took 2.20 seconds to destroy the instance on the hypervisor. [ 895.535890] env[62914]: DEBUG oslo.service.loopingcall [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.536070] env[62914]: DEBUG nova.compute.manager [-] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 895.536171] env[62914]: DEBUG nova.network.neutron [-] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 895.560234] env[62914]: DEBUG nova.compute.manager [req-53eadfb5-0765-4dd1-bdef-fb27f9996aff req-cd3631fd-53c9-4ea5-aedf-4bc4f2c3b6cf service nova] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Detach interface failed, port_id=ae6db457-8035-4a28-bf52-7113144cfe11, reason: Instance aede8da7-8bf2-4963-b08b-6e06007614a5 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 895.698104] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b6f399-1421-4fe2-8ece-6d2376104316 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.975s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.785122] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521394e8-9d91-52e1-135e-665c6fc0c7f7, 'name': SearchDatastore_Task, 'duration_secs': 0.013107} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.785489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.785781] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 83de3d7c-2308-4678-ae90-a30705f6a8c4/83de3d7c-2308-4678-ae90-a30705f6a8c4.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 895.786095] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76ded2be-bf95-4864-a28f-1f2fa5846de4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.795562] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 895.795562] env[62914]: value = "task-4832079" [ 895.795562] env[62914]: _type = "Task" [ 895.795562] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.805690] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.847586] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e93c5db-0361-4290-a82a-4e588a46acf3 req-d75f722a-daf1-458c-be0f-1a7c570302c2 service nova] Releasing lock "refresh_cache-83de3d7c-2308-4678-ae90-a30705f6a8c4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.904046] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f3d9e0-fabb-4ff4-97dd-29cb9f207bed tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "32e8f18e-2116-43bd-9951-ad809ab95ba2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.207s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.943438] env[62914]: INFO nova.compute.manager [-] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Took 1.32 seconds to deallocate network for instance. [ 895.977084] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832078, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.066368] env[62914]: DEBUG nova.network.neutron [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Successfully updated port: 5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.184638] env[62914]: DEBUG nova.network.neutron [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [{"id": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "address": "fa:16:3e:72:0c:2d", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7fd222f-01", "ovs_interfaceid": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.276429] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "47aa2783-367e-4445-8261-7c75eb7561ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.278438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "47aa2783-367e-4445-8261-7c75eb7561ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.317771] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832079, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.360209] env[62914]: DEBUG nova.compute.manager [req-e764e8c4-f2d2-44ad-a081-1761dfbec68a req-b4f855bd-d48a-46b1-b18f-cd2ce0a90802 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Received event network-vif-deleted-691c01fe-1d59-431c-9474-7726ec537a5b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 896.360209] env[62914]: INFO nova.compute.manager [req-e764e8c4-f2d2-44ad-a081-1761dfbec68a req-b4f855bd-d48a-46b1-b18f-cd2ce0a90802 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Neutron deleted interface 691c01fe-1d59-431c-9474-7726ec537a5b; detaching it from the instance and deleting it from the info cache [ 896.360209] env[62914]: DEBUG nova.network.neutron [req-e764e8c4-f2d2-44ad-a081-1761dfbec68a req-b4f855bd-d48a-46b1-b18f-cd2ce0a90802 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.423757] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7684c6-c78a-4f0e-b27d-c51ae68d3702 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.435887] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93eb5fb-7ef3-469d-a41f-c50073a0afe9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.483452] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.488113] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f626dad2-583e-4d04-b70f-6989aab8fe28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.499708] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832078, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.503159] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9850a646-eaa6-4724-8800-a46cc04395e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.519383] env[62914]: DEBUG nova.compute.provider_tree [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.571343] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.571507] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.571667] env[62914]: DEBUG nova.network.neutron [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 896.578110] env[62914]: DEBUG nova.network.neutron [-] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.689847] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.782959] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 896.808696] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.755385} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.808696] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 83de3d7c-2308-4678-ae90-a30705f6a8c4/83de3d7c-2308-4678-ae90-a30705f6a8c4.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 896.808696] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.808696] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d58c567a-5e73-4fab-a068-daa4840e440f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.817466] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 896.817466] env[62914]: value = "task-4832080" [ 896.817466] env[62914]: _type = "Task" [ 896.817466] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.826871] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832080, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.862576] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1582fb2c-2ac6-43a6-a0f3-610c0b180371 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.876239] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6811f54c-7a2e-4484-9efb-755bb566edc9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.926869] env[62914]: DEBUG nova.compute.manager [req-e764e8c4-f2d2-44ad-a081-1761dfbec68a req-b4f855bd-d48a-46b1-b18f-cd2ce0a90802 service nova] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Detach interface failed, port_id=691c01fe-1d59-431c-9474-7726ec537a5b, reason: Instance 7d8287f9-10be-4834-8b7a-1b764145d1c3 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 896.994063] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832078, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.023357] env[62914]: DEBUG nova.scheduler.client.report [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 897.030720] env[62914]: DEBUG nova.compute.manager [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Received event network-vif-plugged-5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 897.030936] env[62914]: DEBUG oslo_concurrency.lockutils [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.031161] env[62914]: DEBUG oslo_concurrency.lockutils [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] Lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.031449] env[62914]: DEBUG oslo_concurrency.lockutils [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] Lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.031707] env[62914]: DEBUG nova.compute.manager [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] No waiting events found dispatching network-vif-plugged-5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 897.031954] env[62914]: WARNING nova.compute.manager [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Received unexpected event network-vif-plugged-5d5caccf-1912-40af-a849-900df4764c6f for instance with vm_state building and task_state spawning. 
[ 897.032224] env[62914]: DEBUG nova.compute.manager [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Received event network-changed-5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 897.032462] env[62914]: DEBUG nova.compute.manager [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Refreshing instance network info cache due to event network-changed-5d5caccf-1912-40af-a849-900df4764c6f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 897.032679] env[62914]: DEBUG oslo_concurrency.lockutils [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.080964] env[62914]: INFO nova.compute.manager [-] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Took 1.54 seconds to deallocate network for instance. [ 897.123403] env[62914]: DEBUG nova.network.neutron [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.225923] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803ac79d-350b-4ed8-86e2-1382b7e8962c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.248785] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968f70f8-dfc3-40eb-bb3b-924d8236a1b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.259323] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 897.292700] env[62914]: DEBUG nova.network.neutron [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.308086] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.327891] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832080, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11458} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.328232] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.331908] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1a8a36-4a29-4285-9d9d-6ab75eb4d964 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.353904] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 83de3d7c-2308-4678-ae90-a30705f6a8c4/83de3d7c-2308-4678-ae90-a30705f6a8c4.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.354273] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bc8438c-f67c-4059-a31d-8069fd85948f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.375820] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 897.375820] env[62914]: value = "task-4832081" [ 897.375820] env[62914]: _type = "Task" [ 897.375820] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.386783] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832081, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.495145] env[62914]: DEBUG oslo_vmware.api [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832078, 'name': PowerOnVM_Task, 'duration_secs': 1.76801} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.495326] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 897.495494] env[62914]: INFO nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Took 9.88 seconds to spawn the instance on the hypervisor. [ 897.495684] env[62914]: DEBUG nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 897.496512] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50491e78-884b-4ace-9139-8747b140eea7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.530035] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.162s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.532580] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.439s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.534089] env[62914]: INFO nova.compute.claims [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.550334] env[62914]: INFO nova.scheduler.client.report [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted allocations for instance bd81fcb7-abef-4b86-8dce-f07b1c226f2f [ 897.588382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.765937] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 897.766307] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-807d7c50-0b83-49dd-91d8-fa36ece10567 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.775173] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 897.775173] env[62914]: value = "task-4832082" [ 897.775173] env[62914]: _type = "Task" [ 897.775173] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.787256] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832082, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.796079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.796496] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Instance network_info: |[{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 897.796868] env[62914]: DEBUG oslo_concurrency.lockutils 
[req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.797310] env[62914]: DEBUG nova.network.neutron [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Refreshing network info cache for port 5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 897.798759] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:81:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c9a12d2-469f-4199-bfaa-f791d765deac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d5caccf-1912-40af-a849-900df4764c6f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.807738] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating folder: Project (5ae1b7abf6f24eccb2b44d82687deb76). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 897.808993] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-280b0309-6fa9-4b14-a5b9-cad70f759306 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.887262] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.889099] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created folder: Project (5ae1b7abf6f24eccb2b44d82687deb76) in parent group-v941773. [ 897.889297] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating folder: Instances. Parent ref: group-v941978. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 897.889547] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dae5b23e-a84d-4450-b37b-a5d391e03c59 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.899386] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created folder: Instances in parent group-v941978. 
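The Folder.CreateFolder / Folder.CreateVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver calls a vSphere method through the API session, and for *_Task methods it then blocks in wait_for_task, which is what emits the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal, hedged sketch of that pattern is below; the vCenter host and credentials are placeholders, the 'group-v941773' value simply echoes the parent ref logged above, and this is not Nova's actual call site.

    # Hedged sketch of the oslo.vmware invoke/poll pattern visible in the log above.
    # Host and credentials are illustrative placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # An "Invoking Folder.CreateFolder with opID=..." line corresponds to a call like
    # this; CreateFolder returns the new folder moref directly (not a *_Task method).
    parent = vim_util.get_moref('group-v941773', 'Folder')
    instances_folder = session.invoke_api(session.vim, 'CreateFolder', parent,
                                          name='Instances')

    # "Invoking Folder.CreateVM_Task ..." returns a task moref instead; blocking on it
    # with session.wait_for_task(task_ref) produces the "Waiting for the task" /
    # "progress is N%" / "completed successfully" entries seen throughout this log.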
[ 897.899657] env[62914]: DEBUG oslo.service.loopingcall [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.899881] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 897.900125] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02982700-6886-4822-b446-62bd866da3ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.921229] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.921229] env[62914]: value = "task-4832085" [ 897.921229] env[62914]: _type = "Task" [ 897.921229] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.931020] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832085, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.014417] env[62914]: INFO nova.compute.manager [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Took 48.14 seconds to build instance. [ 898.058292] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3e0a111-666a-4e59-b420-5ca29e57a2c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "bd81fcb7-abef-4b86-8dce-f07b1c226f2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.525s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.288496] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832082, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.388625] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832081, 'name': ReconfigVM_Task, 'duration_secs': 0.740845} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.388990] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 83de3d7c-2308-4678-ae90-a30705f6a8c4/83de3d7c-2308-4678-ae90-a30705f6a8c4.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.389735] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a80d96e7-7b8b-4e68-a4d1-c7c879e27a42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.400491] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 898.400491] env[62914]: value = "task-4832086" [ 898.400491] env[62914]: _type = "Task" [ 898.400491] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.410353] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832086, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.432159] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832085, 'name': CreateVM_Task, 'duration_secs': 0.435806} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.432382] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 898.433122] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.433297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.433637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.433912] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71c74ba9-0cfe-4967-98f9-91f6e2380d15 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.439612] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 898.439612] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a9ce4d-4ac1-eea0-a7ad-411defa8927e" [ 898.439612] env[62914]: _type = "Task" [ 898.439612] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.451699] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a9ce4d-4ac1-eea0-a7ad-411defa8927e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.517670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-20811085-382d-4f8d-8529-edfc38f3e907 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.970s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.562121] env[62914]: DEBUG nova.network.neutron [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updated VIF entry in instance network info cache for port 5d5caccf-1912-40af-a849-900df4764c6f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 898.562604] env[62914]: DEBUG nova.network.neutron [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.788250] env[62914]: DEBUG oslo_vmware.api [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832082, 'name': PowerOnVM_Task, 'duration_secs': 0.824186} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.791405] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 898.791662] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd0d347-f5a1-4f65-888d-dce167873848 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance '557c0538-fc4a-403a-a9cb-b706e2260b1c' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 898.910413] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832086, 'name': Rename_Task, 'duration_secs': 0.241287} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.913192] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 898.914577] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61b7f648-e7ca-4be7-89de-38cff4b52325 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.924652] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 898.924652] env[62914]: value = "task-4832087" [ 898.924652] env[62914]: _type = "Task" [ 898.924652] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.936742] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.953330] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a9ce4d-4ac1-eea0-a7ad-411defa8927e, 'name': SearchDatastore_Task, 'duration_secs': 0.018388} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.953597] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.954136] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 898.954136] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.954344] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.954578] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.954945] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f99e74d6-7481-457c-a1b0-ef229ab97366 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.969764] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.970380] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 898.971281] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-095ce94c-c0f9-4b44-bd32-6577a30bd7d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.981199] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 898.981199] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52479070-549f-1fbe-2b6e-1ba850bc338a" [ 898.981199] env[62914]: _type = "Task" [ 898.981199] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.992480] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52479070-549f-1fbe-2b6e-1ba850bc338a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.004088] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83c9573-92f3-43dd-a382-48282e0df0fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.012697] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2bf9a6-2a0d-4f12-b585-d0e148702f58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.046365] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86437b37-ccf3-4b57-b8af-b2c7f3f03880 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.055642] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f872ded-4447-4ed0-a6b1-1ba8b0be750a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.071607] env[62914]: DEBUG oslo_concurrency.lockutils [req-47c6c5fa-4486-4279-97fb-f8fa034173d9 req-9f5bffae-d455-455a-b20d-783436f46408 service nova] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.072238] env[62914]: DEBUG nova.compute.provider_tree [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.386320] env[62914]: DEBUG nova.compute.manager [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Received event network-changed-23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 899.387669] env[62914]: DEBUG nova.compute.manager 
[req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Refreshing instance network info cache due to event network-changed-23732df6-58dd-4637-9c04-c25d6b049c91. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 899.387669] env[62914]: DEBUG oslo_concurrency.lockutils [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] Acquiring lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.387669] env[62914]: DEBUG oslo_concurrency.lockutils [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] Acquired lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.387669] env[62914]: DEBUG nova.network.neutron [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Refreshing network info cache for port 23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 899.436841] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832087, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.494499] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52479070-549f-1fbe-2b6e-1ba850bc338a, 'name': SearchDatastore_Task, 'duration_secs': 0.014196} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.495389] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f73f85de-3dd2-4034-a140-71b81b331ced {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.502231] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 899.502231] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52208e34-f77b-8766-6b2b-b4c8f9cb88ef" [ 899.502231] env[62914]: _type = "Task" [ 899.502231] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.514649] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52208e34-f77b-8766-6b2b-b4c8f9cb88ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.577114] env[62914]: DEBUG nova.scheduler.client.report [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 899.938740] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832087, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.004032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "13f2a615-aa95-411d-92f8-9ff1b6eba420" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.004032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.016950] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52208e34-f77b-8766-6b2b-b4c8f9cb88ef, 'name': SearchDatastore_Task, 'duration_secs': 0.014754} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.017986] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.018242] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf/af141439-1c36-4184-9775-d1e30ee77ddf.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 900.018536] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3016cfb-d96a-476a-bf80-313a5688bc31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.028600] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 900.028600] env[62914]: value = "task-4832088" [ 900.028600] env[62914]: _type = "Task" [ 900.028600] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.041453] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "3e6a3787-3e9c-411c-9c3c-305a62061b47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.041748] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.047963] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.081061] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.081210] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 900.084801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.369s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.085765] env[62914]: DEBUG nova.objects.instance [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid 982936be-3cb1-4930-b135-8fc2019c5216 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.155598] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ff47f5-3e9d-5eb1-522e-f7eb4498d099/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 900.157035] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68dbc6cb-1b5e-4701-9eea-d4e4b962d6d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.164817] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ff47f5-3e9d-5eb1-522e-f7eb4498d099/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 900.165376] env[62914]: ERROR oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ff47f5-3e9d-5eb1-522e-f7eb4498d099/disk-0.vmdk due to incomplete transfer. 
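The recurring lock bookkeeping in these entries ("Acquiring lock ...", "acquired ... :: waited Ns", '"released" ... :: held Ns') comes from oslo.concurrency's lockutils wrappers around critical sections such as the resource tracker's compute_resources work. A rough sketch of the two usual forms follows; the lock names and bodies are illustrative only, not Nova's actual call sites.

    # Hedged sketch of the oslo.concurrency primitives behind the lock bookkeeping above.
    # Lock names and guarded bodies are illustrative.
    from oslo_concurrency import lockutils

    # Context-manager form: acquisition and release are logged with wait/held timings.
    with lockutils.lock('compute_resources', fair=True):
        pass  # guarded work, e.g. a resource claim or usage update

    # Decorator form: every call to the wrapped function is serialized on the named
    # lock, and the wrapper logs how long it waited for and then held the lock.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass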
[ 900.165674] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f2f78f05-2e7d-4f61-95af-006393c36f5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.175801] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ff47f5-3e9d-5eb1-522e-f7eb4498d099/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 900.176128] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Uploaded image 10c30911-1fcc-4a60-90d8-e8e9e7c20b92 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 900.178294] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 900.178605] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3d898d3a-a022-44f5-a638-745ccf6cdc29 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.186530] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 900.186530] env[62914]: value = "task-4832089" [ 900.186530] env[62914]: _type = "Task" [ 900.186530] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.197145] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832089, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.437843] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832087, 'name': PowerOnVM_Task} progress is 79%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.439456] env[62914]: DEBUG nova.network.neutron [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updated VIF entry in instance network info cache for port 23732df6-58dd-4637-9c04-c25d6b049c91. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 900.439866] env[62914]: DEBUG nova.network.neutron [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updating instance_info_cache with network_info: [{"id": "23732df6-58dd-4637-9c04-c25d6b049c91", "address": "fa:16:3e:a9:92:aa", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23732df6-58", "ovs_interfaceid": "23732df6-58dd-4637-9c04-c25d6b049c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.510259] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 900.543122] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832088, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.591924] env[62914]: DEBUG nova.compute.utils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 900.593390] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 900.593559] env[62914]: DEBUG nova.network.neutron [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 900.661649] env[62914]: DEBUG nova.policy [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5da18e2dc49746d8a7125efdc106d62b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd271710592bf47b79e16552221fe7107', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 900.707361] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832089, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.944299] env[62914]: DEBUG oslo_concurrency.lockutils [req-7aa47454-37a2-4b0d-821c-f30b5544ece9 req-099ef52a-0e5b-46d8-b21d-a8290a8b3fdf service nova] Releasing lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.944850] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832087, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.036915] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.041224] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.991831} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.041562] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf/af141439-1c36-4184-9775-d1e30ee77ddf.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 901.041853] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.042738] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d22e9ecc-3006-44c3-be7f-cd4e3c9fa2fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.055440] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 901.055440] env[62914]: value = "task-4832090" [ 901.055440] env[62914]: _type = "Task" [ 901.055440] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.073902] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832090, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.097176] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 901.120573] env[62914]: DEBUG nova.network.neutron [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Successfully created port: ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.204241] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832089, 'name': Destroy_Task, 'duration_secs': 0.963287} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.204566] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Destroyed the VM [ 901.204977] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 901.205169] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-96ad0300-487b-4d87-ae15-a488b5bd4c3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.214100] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 901.214100] env[62914]: value = "task-4832091" [ 901.214100] env[62914]: _type = "Task" [ 901.214100] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.219673] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16a143b-8fba-4499-9692-63ad4a08f7a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.224303] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.224620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.224824] env[62914]: DEBUG nova.compute.manager [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Going to confirm migration 4 {{(pid=62914) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 901.231068] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832091, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.235731] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9c78a9-a4b5-43fe-b280-127de37b2ca9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.277025] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b678b3-6143-43d7-95df-cd9ccfcd79ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.285856] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70f82b1-4241-4b57-ac38-17d38a237ffa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.301531] env[62914]: DEBUG nova.compute.provider_tree [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.438755] env[62914]: DEBUG oslo_vmware.api [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832087, 'name': PowerOnVM_Task, 'duration_secs': 2.160309} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.439460] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 901.439710] env[62914]: INFO nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Took 11.15 seconds to spawn the instance on the hypervisor. [ 901.439940] env[62914]: DEBUG nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 901.440821] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2fc802-03f2-4eb8-bad0-013129da6465 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.569265] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832090, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084728} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.569605] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.570516] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd26431-6899-4ff4-b5c1-c7f25517a76e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.593656] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf/af141439-1c36-4184-9775-d1e30ee77ddf.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.594041] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41bf8e53-9c24-4c98-badf-b038e5b9c3bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.619690] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 901.619690] env[62914]: value = "task-4832092" [ 901.619690] env[62914]: _type = "Task" [ 901.619690] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.630677] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832092, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.725394] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832091, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.805938] env[62914]: DEBUG nova.scheduler.client.report [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 901.837377] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.837571] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.837758] env[62914]: DEBUG nova.network.neutron [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.837953] env[62914]: DEBUG nova.objects.instance [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lazy-loading 'info_cache' on Instance uuid 557c0538-fc4a-403a-a9cb-b706e2260b1c {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.964295] env[62914]: INFO nova.compute.manager [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Took 52.08 seconds to build instance. [ 902.116528] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 902.131436] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832092, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.154439] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='695fadf8d7b0fd3f054c76d53d3fe49e',container_format='bare',created_at=2025-11-25T11:26:58Z,direct_url=,disk_format='vmdk',id=8d584922-9f5f-403e-b8e7-e412d68ca5ee,min_disk=1,min_ram=0,name='tempest-test-snap-1376743475',owner='d271710592bf47b79e16552221fe7107',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-11-25T11:27:14Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 902.154748] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 902.154978] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.155277] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 902.155454] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.155611] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 902.155840] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 902.156021] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 902.156208] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 902.156376] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 902.156555] env[62914]: DEBUG nova.virt.hardware [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 902.157542] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2eb9adc-1b47-4fd1-a799-121d399b3fc5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.166134] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc9158a-b6c9-4143-b52b-21d8c90cf4b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.226097] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832091, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.310958] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.226s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.314265] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.848s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.314962] env[62914]: INFO nova.compute.claims [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.340080] env[62914]: INFO nova.scheduler.client.report [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance 982936be-3cb1-4930-b135-8fc2019c5216 [ 902.467172] env[62914]: DEBUG oslo_concurrency.lockutils [None req-23afa176-23f6-40ec-a3ca-95fcf944415d tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.237s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.632316] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832092, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.731530] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832091, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.853620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fb064d83-3020-4a49-916e-7d551f3a2846 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "982936be-3cb1-4930-b135-8fc2019c5216" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.407s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.862970] env[62914]: DEBUG nova.compute.manager [req-e64bcfb6-905a-4c12-a793-f0a1cd8f0902 req-5ce06797-b24a-4f11-b9ff-d2b1dea48208 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Received event network-vif-plugged-ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 902.862970] env[62914]: DEBUG oslo_concurrency.lockutils [req-e64bcfb6-905a-4c12-a793-f0a1cd8f0902 req-5ce06797-b24a-4f11-b9ff-d2b1dea48208 service nova] Acquiring lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.862970] env[62914]: DEBUG oslo_concurrency.lockutils [req-e64bcfb6-905a-4c12-a793-f0a1cd8f0902 req-5ce06797-b24a-4f11-b9ff-d2b1dea48208 service nova] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.862970] env[62914]: DEBUG oslo_concurrency.lockutils [req-e64bcfb6-905a-4c12-a793-f0a1cd8f0902 req-5ce06797-b24a-4f11-b9ff-d2b1dea48208 service nova] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.862970] env[62914]: DEBUG nova.compute.manager [req-e64bcfb6-905a-4c12-a793-f0a1cd8f0902 req-5ce06797-b24a-4f11-b9ff-d2b1dea48208 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] No waiting events found dispatching network-vif-plugged-ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 902.862970] env[62914]: WARNING nova.compute.manager [req-e64bcfb6-905a-4c12-a793-f0a1cd8f0902 req-5ce06797-b24a-4f11-b9ff-d2b1dea48208 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Received unexpected event network-vif-plugged-ae4f4a6c-15e7-46f4-8edd-c415125f941c for instance with vm_state building and task_state 
spawning. [ 902.969702] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 903.133662] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832092, 'name': ReconfigVM_Task, 'duration_secs': 1.222642} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.133801] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Reconfigured VM instance instance-0000004a to attach disk [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf/af141439-1c36-4184-9775-d1e30ee77ddf.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.134569] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c24023f-55b3-4f95-8488-1a3edd207408 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.145653] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 903.145653] env[62914]: value = "task-4832093" [ 903.145653] env[62914]: _type = "Task" [ 903.145653] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.154511] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832093, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.156092] env[62914]: DEBUG nova.network.neutron [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Successfully updated port: ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.184805] env[62914]: DEBUG nova.compute.manager [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Received event network-changed-ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 903.184873] env[62914]: DEBUG nova.compute.manager [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Refreshing instance network info cache due to event network-changed-ae4f4a6c-15e7-46f4-8edd-c415125f941c. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 903.185296] env[62914]: DEBUG oslo_concurrency.lockutils [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] Acquiring lock "refresh_cache-43227b1e-c90a-47d0-a4f5-fd0af0826e94" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.185530] env[62914]: DEBUG oslo_concurrency.lockutils [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] Acquired lock "refresh_cache-43227b1e-c90a-47d0-a4f5-fd0af0826e94" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.185765] env[62914]: DEBUG nova.network.neutron [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Refreshing network info cache for port ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 903.229947] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832091, 'name': RemoveSnapshot_Task} progress is 84%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.238703] env[62914]: DEBUG nova.network.neutron [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [{"id": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "address": "fa:16:3e:72:0c:2d", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7fd222f-01", "ovs_interfaceid": "e7fd222f-0127-4616-8d16-801a8c35a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.491229] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.655308] env[62914]: DEBUG oslo_vmware.api [None 
req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832093, 'name': Rename_Task, 'duration_secs': 0.157704} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.655308] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 903.655497] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d1f19af-09f2-4999-9654-ffe3060796d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.658934] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "refresh_cache-43227b1e-c90a-47d0-a4f5-fd0af0826e94" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.663431] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 903.663431] env[62914]: value = "task-4832094" [ 903.663431] env[62914]: _type = "Task" [ 903.663431] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.677165] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.729264] env[62914]: DEBUG oslo_vmware.api [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832091, 'name': RemoveSnapshot_Task, 'duration_secs': 2.205138} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.732109] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 903.732364] env[62914]: INFO nova.compute.manager [None req-1ac232b0-0d48-4f7d-854d-b53f19037ec2 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Took 19.63 seconds to snapshot the instance on the hypervisor. 
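
[editor's note] The recurring "Waiting for the task" / "Task: {...} progress is N%" / "completed successfully" entries above (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, RemoveSnapshot_Task) all come from oslo.vmware's task-polling loop (api.py:397/434/444 in this trace). The snippet below is a minimal, dependency-free sketch of that polling pattern, not the oslo.vmware implementation; FakeTask and wait_for_task are hypothetical stand-ins used only to show where the progress and duration_secs values in these log lines come from.

import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle (not the real oslo.vmware object)."""
    def __init__(self, name, steps):
        self.name = name
        self._steps = steps  # sequence of (state, progress) snapshots
        self._i = 0

    def info(self):
        # Each call returns the next simulated TaskInfo snapshot.
        state, progress = self._steps[min(self._i, len(self._steps) - 1)]
        self._i += 1
        return state, progress

def wait_for_task(task, interval=0.5):
    """Poll a task until it reaches a terminal state, printing progress lines
    in the same spirit as the log entries above (simplified sketch)."""
    start = time.monotonic()
    while True:
        state, progress = task.info()
        if state in ("queued", "running"):
            print(f"Task: {{'name': {task.name!r}}} progress is {progress}%.")
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        if state == "success":
            print(f"Task: {{'name': {task.name!r}, 'duration_secs': {duration:.6f}}} "
                  "completed successfully.")
            return
        raise RuntimeError(f"Task {task.name} failed in state {state!r}")

if __name__ == "__main__":
    wait_for_task(
        FakeTask("ReconfigVM_Task",
                 [("queued", 0), ("running", 6), ("running", 14), ("success", 100)]),
        interval=0.01)

In the real driver the state and progress values are read from vCenter's TaskInfo rather than simulated; the names above are purely illustrative.
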
[ 903.741859] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-557c0538-fc4a-403a-a9cb-b706e2260b1c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.742305] env[62914]: DEBUG nova.objects.instance [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lazy-loading 'migration_context' on Instance uuid 557c0538-fc4a-403a-a9cb-b706e2260b1c {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.765127] env[62914]: DEBUG nova.network.neutron [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 903.799447] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b791967e-e611-41ed-b4b0-75f4ceebfa4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.808641] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4b7ecc-c260-4122-bf3a-6226fcee074a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.846858] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23706f4f-0080-4150-82f2-5ae94974aa87 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.856139] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7a64d8-e2be-4498-8d88-fa9b77cf2f7f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.871163] env[62914]: DEBUG nova.compute.provider_tree [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.956126] env[62914]: DEBUG nova.network.neutron [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.120318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "af541b15-19ce-415a-b03e-cb605b780247" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.120579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock 
"af541b15-19ce-415a-b03e-cb605b780247" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.174971] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832094, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.199029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.199029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.246085] env[62914]: DEBUG nova.objects.base [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Object Instance<557c0538-fc4a-403a-a9cb-b706e2260b1c> lazy-loaded attributes: info_cache,migration_context {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 904.246905] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d7a0ea-fa86-4e08-a795-5d91d49ba7ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.267544] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-319c0380-636d-4cb8-bb45-47e939264a5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.274188] env[62914]: DEBUG oslo_vmware.api [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 904.274188] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b12934-f3bc-b3c8-fb1c-932ce0962903" [ 904.274188] env[62914]: _type = "Task" [ 904.274188] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.282665] env[62914]: DEBUG oslo_vmware.api [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b12934-f3bc-b3c8-fb1c-932ce0962903, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.377602] env[62914]: DEBUG nova.scheduler.client.report [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.458977] env[62914]: DEBUG oslo_concurrency.lockutils [req-094c0623-e558-47f6-a0bf-9a5027a2e6bc req-0f666675-25fe-4d58-b86b-162e09f61202 service nova] Releasing lock "refresh_cache-43227b1e-c90a-47d0-a4f5-fd0af0826e94" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.459389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "refresh_cache-43227b1e-c90a-47d0-a4f5-fd0af0826e94" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.459550] env[62914]: DEBUG nova.network.neutron [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 904.673949] env[62914]: DEBUG oslo_vmware.api [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832094, 'name': PowerOnVM_Task, 'duration_secs': 1.005212} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.674272] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 904.674502] env[62914]: INFO nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Took 9.51 seconds to spawn the instance on the hypervisor. 
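
[editor's note] The "compute_resources" and "refresh_cache-*" entries around this point follow oslo.concurrency's acquire/release logging: "waited" is the time a caller was blocked before the lock was granted, and "held" is the time from acquisition to release. The sketch below is a stdlib-only illustration of that instrumentation, assuming a plain threading.Lock; it is not the oslo_concurrency.lockutils implementation, only a picture of where the two timings originate.

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held timings in the same spirit
    as the lockutils log lines above (illustrative only)."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    acquired_at = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired_at - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - acquired_at:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources"):
        time.sleep(0.1)  # work done while the resource tracker holds the lock
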
[ 904.674641] env[62914]: DEBUG nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 904.675458] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fa0da5-d488-4bd5-aac1-7e786753fd58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.704742] env[62914]: DEBUG nova.compute.utils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 904.787150] env[62914]: DEBUG oslo_vmware.api [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b12934-f3bc-b3c8-fb1c-932ce0962903, 'name': SearchDatastore_Task, 'duration_secs': 0.036566} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.787478] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.883466] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.884015] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 904.886779] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.172s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.887037] env[62914]: DEBUG nova.objects.instance [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lazy-loading 'resources' on Instance uuid b477cd62-49c2-4e3c-98ea-b4154dda4986 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.009154] env[62914]: DEBUG nova.network.neutron [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.195955] env[62914]: INFO nova.compute.manager [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Took 38.84 seconds to build instance. [ 905.207856] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.212905] env[62914]: DEBUG nova.network.neutron [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Updating instance_info_cache with network_info: [{"id": "ae4f4a6c-15e7-46f4-8edd-c415125f941c", "address": "fa:16:3e:d3:55:27", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae4f4a6c-15", "ovs_interfaceid": "ae4f4a6c-15e7-46f4-8edd-c415125f941c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.394084] env[62914]: DEBUG 
nova.compute.utils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.396728] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 905.396728] env[62914]: DEBUG nova.network.neutron [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.443151] env[62914]: DEBUG nova.policy [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fb02b5af55441b0b788b739fc8dc623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5adc4dc554ed4fe69f214161fd8ab9b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 905.701313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-da0ebc85-51ec-4975-b05c-b02390cb72da tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.717332] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "refresh_cache-43227b1e-c90a-47d0-a4f5-fd0af0826e94" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.717745] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Instance network_info: |[{"id": "ae4f4a6c-15e7-46f4-8edd-c415125f941c", "address": "fa:16:3e:d3:55:27", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae4f4a6c-15", "ovs_interfaceid": "ae4f4a6c-15e7-46f4-8edd-c415125f941c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 905.723282] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:55:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae4f4a6c-15e7-46f4-8edd-c415125f941c', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.731845] env[62914]: DEBUG oslo.service.loopingcall [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.733342] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 905.733393] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff5d327c-3220-48b6-b2ca-e30cb920644c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.752968] env[62914]: DEBUG nova.network.neutron [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Successfully created port: 4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.758016] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.758016] env[62914]: value = "task-4832095" [ 905.758016] env[62914]: _type = "Task" [ 905.758016] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.770719] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832095, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.900051] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 905.906887] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df4dac5-9581-4718-869d-e2cdd10e9d16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.917902] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd5b54c-2917-439a-8d73-942fdfa4fc16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.958015] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111801cc-5815-4c81-a90b-f3e4f8ecdbbf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.967790] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bae7a5-47e2-4a85-9eb5-59c63ccd8d04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.986436] env[62914]: DEBUG nova.compute.provider_tree [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.207556] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 906.269855] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832095, 'name': CreateVM_Task, 'duration_secs': 0.395918} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.270050] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 906.270863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.270999] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.271374] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 906.271646] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffe6ada5-6b44-463f-97cc-030090c62e8f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.279232] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 906.279232] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52610cb1-86db-bcb4-ad30-d05e4204aaa7" [ 906.279232] env[62914]: _type = "Task" [ 906.279232] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.289482] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52610cb1-86db-bcb4-ad30-d05e4204aaa7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.317863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.318182] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.318438] env[62914]: INFO nova.compute.manager [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Attaching volume a229af3a-92d4-4eec-8a9d-985189ec319c to /dev/sdb [ 906.357279] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8383f79b-b5f5-4c38-a309-b0643c405b06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.365423] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf10b0a-5159-4d51-a543-24747c94f9b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.380535] env[62914]: DEBUG nova.virt.block_device [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Updating existing volume attachment record: 21160eab-2b9e-4240-aa71-d33b1e0d45b9 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 906.492028] env[62914]: DEBUG nova.scheduler.client.report [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.737292] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.781581] env[62914]: DEBUG nova.compute.manager [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 
tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 906.782679] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdceadc-069d-43c4-aaff-ac5b1a7705f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.799367] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.799675] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Processing image 8d584922-9f5f-403e-b8e7-e412d68ca5ee {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.799933] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.800109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.800302] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.800701] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86bd2dd0-f2ee-406d-978e-7834dfea4f52 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.810707] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.810982] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 906.811821] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d08207ab-f5c8-456a-b538-0b75f3b3bba6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.817534] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 906.817534] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f0fb43-d9a7-c6da-bccc-2fc14973f765" [ 906.817534] env[62914]: _type = "Task" [ 906.817534] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.829487] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f0fb43-d9a7-c6da-bccc-2fc14973f765, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.910895] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 906.944827] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.945336] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.945508] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.945812] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] 
Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.946076] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.946344] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.946801] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.947013] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.947177] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.947439] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.947716] env[62914]: DEBUG nova.virt.hardware [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.949137] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ff1ba6-f6cb-41ef-8ad9-0beecbf87e41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.960828] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36e64b6-d1e8-4e2c-9862-8e07b5676d92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.996709] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.002717] env[62914]: DEBUG oslo_concurrency.lockutils 
[None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.138s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.002717] env[62914]: INFO nova.compute.claims [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.021106] env[62914]: INFO nova.scheduler.client.report [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleted allocations for instance b477cd62-49c2-4e3c-98ea-b4154dda4986 [ 907.089698] env[62914]: DEBUG nova.compute.manager [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Received event network-changed-5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 907.089788] env[62914]: DEBUG nova.compute.manager [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Refreshing instance network info cache due to event network-changed-5d5caccf-1912-40af-a849-900df4764c6f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 907.089963] env[62914]: DEBUG oslo_concurrency.lockutils [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.090908] env[62914]: DEBUG oslo_concurrency.lockutils [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.091180] env[62914]: DEBUG nova.network.neutron [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Refreshing network info cache for port 5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 907.300292] env[62914]: INFO nova.compute.manager [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] instance snapshotting [ 907.303503] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16228981-bbe4-4bc6-9cc6-c493b2c9cabc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.327592] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d18bb3-e4e3-4220-ba7e-5280f099f2d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.339322] 
env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 907.339473] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Fetch image to [datastore2] OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167/OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 907.339677] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Downloading stream optimized image 8d584922-9f5f-403e-b8e7-e412d68ca5ee to [datastore2] OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167/OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167.vmdk on the data store datastore2 as vApp {{(pid=62914) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 907.339787] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Downloading image file data 8d584922-9f5f-403e-b8e7-e412d68ca5ee to the ESX as VM named 'OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167' {{(pid=62914) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 907.432180] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 907.432180] env[62914]: value = "resgroup-9" [ 907.432180] env[62914]: _type = "ResourcePool" [ 907.432180] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 907.432506] env[62914]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-af48dea6-406e-4c6c-84da-c2b705ff9be8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.455309] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease: (returnval){ [ 907.455309] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529113d9-4dda-8344-9fc3-1c0d705f4d8f" [ 907.455309] env[62914]: _type = "HttpNfcLease" [ 907.455309] env[62914]: } obtained for vApp import into resource pool (val){ [ 907.455309] env[62914]: value = "resgroup-9" [ 907.455309] env[62914]: _type = "ResourcePool" [ 907.455309] env[62914]: }. 
{{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 907.455774] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the lease: (returnval){ [ 907.455774] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529113d9-4dda-8344-9fc3-1c0d705f4d8f" [ 907.455774] env[62914]: _type = "HttpNfcLease" [ 907.455774] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 907.463151] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 907.463151] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529113d9-4dda-8344-9fc3-1c0d705f4d8f" [ 907.463151] env[62914]: _type = "HttpNfcLease" [ 907.463151] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 907.529766] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8761a635-a784-4efd-a8a6-e395393feef9 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "b477cd62-49c2-4e3c-98ea-b4154dda4986" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.891s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.846570] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 907.847112] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cc935b5a-23cf-4ea5-8b37-6916bb05a2a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.855791] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 907.855791] env[62914]: value = "task-4832100" [ 907.855791] env[62914]: _type = "Task" [ 907.855791] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.868297] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832100, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.872394] env[62914]: DEBUG nova.compute.manager [req-fa31f7fd-a53a-4fd1-ab56-c0d563bb92f2 req-d3ff361a-5d5c-4a82-905a-3a7dfc961c64 service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Received event network-vif-plugged-4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 907.872625] env[62914]: DEBUG oslo_concurrency.lockutils [req-fa31f7fd-a53a-4fd1-ab56-c0d563bb92f2 req-d3ff361a-5d5c-4a82-905a-3a7dfc961c64 service nova] Acquiring lock "55192659-4d65-4e74-a47f-46d650b6b095-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.872839] env[62914]: DEBUG oslo_concurrency.lockutils [req-fa31f7fd-a53a-4fd1-ab56-c0d563bb92f2 req-d3ff361a-5d5c-4a82-905a-3a7dfc961c64 service nova] Lock "55192659-4d65-4e74-a47f-46d650b6b095-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.873021] env[62914]: DEBUG oslo_concurrency.lockutils [req-fa31f7fd-a53a-4fd1-ab56-c0d563bb92f2 req-d3ff361a-5d5c-4a82-905a-3a7dfc961c64 service nova] Lock "55192659-4d65-4e74-a47f-46d650b6b095-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.873242] env[62914]: DEBUG nova.compute.manager [req-fa31f7fd-a53a-4fd1-ab56-c0d563bb92f2 req-d3ff361a-5d5c-4a82-905a-3a7dfc961c64 service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] No waiting events found dispatching network-vif-plugged-4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 907.873431] env[62914]: WARNING nova.compute.manager [req-fa31f7fd-a53a-4fd1-ab56-c0d563bb92f2 req-d3ff361a-5d5c-4a82-905a-3a7dfc961c64 service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Received unexpected event network-vif-plugged-4ca49936-a41c-4418-a42a-114ca4faa7c4 for instance with vm_state building and task_state spawning. [ 907.890228] env[62914]: DEBUG nova.network.neutron [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Successfully updated port: 4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 907.917175] env[62914]: DEBUG nova.network.neutron [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updated VIF entry in instance network info cache for port 5d5caccf-1912-40af-a849-900df4764c6f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 907.917629] env[62914]: DEBUG nova.network.neutron [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.964879] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 907.964879] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529113d9-4dda-8344-9fc3-1c0d705f4d8f" [ 907.964879] env[62914]: _type = "HttpNfcLease" [ 907.964879] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 908.309362] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.309625] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.309834] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.310060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.310316] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.314024] env[62914]: INFO nova.compute.manager [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Terminating instance [ 908.316608] env[62914]: DEBUG nova.compute.manager [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 908.316608] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 908.317469] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8161cc-f6fe-471d-8b69-8f12098a6902 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.328944] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 908.329348] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d084f1d7-d46f-4dcf-af32-1a95c717cba9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.340243] env[62914]: DEBUG oslo_vmware.api [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 908.340243] env[62914]: value = "task-4832101" [ 908.340243] env[62914]: _type = "Task" [ 908.340243] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.350168] env[62914]: DEBUG oslo_vmware.api [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.371923] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832100, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.399337] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "refresh_cache-55192659-4d65-4e74-a47f-46d650b6b095" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.399705] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "refresh_cache-55192659-4d65-4e74-a47f-46d650b6b095" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.399965] env[62914]: DEBUG nova.network.neutron [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.420759] env[62914]: DEBUG oslo_concurrency.lockutils [req-22dcc943-a465-42c9-bdef-249e587a485e req-06373c85-1f31-424e-ac94-217a2e3aa56a service nova] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.462687] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4733cdc-39c0-4788-9eb1-9ea87e7a7e54 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.470587] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.470587] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529113d9-4dda-8344-9fc3-1c0d705f4d8f" [ 908.470587] env[62914]: _type = "HttpNfcLease" [ 908.470587] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 908.473948] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 908.473948] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529113d9-4dda-8344-9fc3-1c0d705f4d8f" [ 908.473948] env[62914]: _type = "HttpNfcLease" [ 908.473948] env[62914]: }. 
{{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 908.475304] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a7ec9c-3c10-41ef-8f32-65d90c737b91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.479683] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ff589c-c07d-4aa9-b549-44ee45c5f6c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.493237] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f6668-e975-893f-b3b9-54db6ef1f5ad/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 908.493559] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f6668-e975-893f-b3b9-54db6ef1f5ad/disk-0.vmdk. {{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 908.528157] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83029dda-41ba-483d-83f9-98932c2c3f31 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.588024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c217279-3b75-41da-ab4b-5cdcc4f8aff9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.594543] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-801ee187-2eca-4964-bde2-089839b2b964 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.607799] env[62914]: DEBUG nova.compute.provider_tree [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.852373] env[62914]: DEBUG oslo_vmware.api [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832101, 'name': PowerOffVM_Task, 'duration_secs': 0.339532} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.853993] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 908.854256] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 908.854574] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa937b42-c510-49bf-99e9-357788984787 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.870047] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832100, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.929030] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 908.929467] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 908.929709] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleting the datastore file [datastore2] 1fb67ac1-c0b7-48b9-8562-d457d46709bc {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.930087] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-302b95c5-2033-489f-b150-ffcb59dc6f5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.940433] env[62914]: DEBUG oslo_vmware.api [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for the task: (returnval){ [ 908.940433] env[62914]: value = "task-4832103" [ 908.940433] env[62914]: _type = "Task" [ 908.940433] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.942226] env[62914]: DEBUG nova.network.neutron [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.956967] env[62914]: DEBUG oslo_vmware.api [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.112308] env[62914]: DEBUG nova.scheduler.client.report [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.144661] env[62914]: DEBUG nova.network.neutron [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Updating instance_info_cache with network_info: [{"id": "4ca49936-a41c-4418-a42a-114ca4faa7c4", "address": "fa:16:3e:81:aa:bf", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ca49936-a4", "ovs_interfaceid": "4ca49936-a41c-4418-a42a-114ca4faa7c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.368579] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832100, 'name': CreateSnapshot_Task, 'duration_secs': 1.01494} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.368985] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 909.371126] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9128575-c24e-4f13-8720-bca7a1922e9f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.457981] env[62914]: DEBUG oslo_vmware.api [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Task: {'id': task-4832103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262065} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.459521] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.459735] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 909.459926] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 909.460136] env[62914]: INFO nova.compute.manager [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 909.460436] env[62914]: DEBUG oslo.service.loopingcall [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.460683] env[62914]: DEBUG nova.compute.manager [-] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 909.460799] env[62914]: DEBUG nova.network.neutron [-] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 909.529370] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Completed reading data from the image iterator. {{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 909.529657] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f6668-e975-893f-b3b9-54db6ef1f5ad/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 909.530819] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037e71f9-9dd2-4ac1-8f0c-5c2b8436aea7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.538350] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f6668-e975-893f-b3b9-54db6ef1f5ad/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 909.538703] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f6668-e975-893f-b3b9-54db6ef1f5ad/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 909.539182] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ab57b42b-06f5-4dac-8d67-d86801e9ff8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.620751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.621386] env[62914]: DEBUG nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 909.624225] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.563s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.626033] env[62914]: INFO nova.compute.claims [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.649251] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "refresh_cache-55192659-4d65-4e74-a47f-46d650b6b095" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.651423] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Instance network_info: |[{"id": "4ca49936-a41c-4418-a42a-114ca4faa7c4", "address": "fa:16:3e:81:aa:bf", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ca49936-a4", "ovs_interfaceid": "4ca49936-a41c-4418-a42a-114ca4faa7c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 909.651423] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:aa:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ca49936-a41c-4418-a42a-114ca4faa7c4', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.660099] env[62914]: DEBUG oslo.service.loopingcall [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 
tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.660219] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 909.660453] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb560060-848e-48eb-8fcf-22950179fff3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.685896] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.685896] env[62914]: value = "task-4832105" [ 909.685896] env[62914]: _type = "Task" [ 909.685896] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.695409] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832105, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.835040] env[62914]: DEBUG oslo_vmware.rw_handles [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529f6668-e975-893f-b3b9-54db6ef1f5ad/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 909.835197] env[62914]: INFO nova.virt.vmwareapi.images [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Downloaded image file data 8d584922-9f5f-403e-b8e7-e412d68ca5ee [ 909.836176] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ec2fbe-6f74-4343-9529-7f1f2344cdcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.857043] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b632b0f-6b6a-4ded-b4d2-69803dd84888 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.892922] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 909.893775] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2c1db430-e901-49b9-a007-310698c6d5e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.903059] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 909.903059] env[62914]: value = "task-4832107" [ 909.903059] env[62914]: _type = "Task" [ 909.903059] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.912486] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.977087] env[62914]: DEBUG nova.compute.manager [req-cad25cd9-9bf8-476d-8e94-003f6018d691 req-9037bbcb-9d4f-4f65-8f4c-493d2c4610ff service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Received event network-vif-deleted-458d38ce-bc0b-471c-a588-9d31e99cbe74 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 909.977087] env[62914]: INFO nova.compute.manager [req-cad25cd9-9bf8-476d-8e94-003f6018d691 req-9037bbcb-9d4f-4f65-8f4c-493d2c4610ff service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Neutron deleted interface 458d38ce-bc0b-471c-a588-9d31e99cbe74; detaching it from the instance and deleting it from the info cache [ 909.977087] env[62914]: DEBUG nova.network.neutron [req-cad25cd9-9bf8-476d-8e94-003f6018d691 req-9037bbcb-9d4f-4f65-8f4c-493d2c4610ff service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.997925] env[62914]: DEBUG nova.compute.manager [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Received event network-changed-4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 909.998187] env[62914]: DEBUG nova.compute.manager [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Refreshing instance network info cache due to event network-changed-4ca49936-a41c-4418-a42a-114ca4faa7c4. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 909.998417] env[62914]: DEBUG oslo_concurrency.lockutils [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] Acquiring lock "refresh_cache-55192659-4d65-4e74-a47f-46d650b6b095" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.998605] env[62914]: DEBUG oslo_concurrency.lockutils [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] Acquired lock "refresh_cache-55192659-4d65-4e74-a47f-46d650b6b095" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.998868] env[62914]: DEBUG nova.network.neutron [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Refreshing network info cache for port 4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 910.130281] env[62914]: DEBUG nova.compute.utils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 910.134083] env[62914]: DEBUG nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Not allocating networking since 'none' was specified. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 910.197216] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832105, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.205048] env[62914]: INFO nova.virt.vmwareapi.images [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] The imported VM was unregistered [ 910.208200] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 910.208507] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating directory with path [datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.208996] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ed77914-d6a6-4ffb-af44-5713ba6afc39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.224257] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created directory with path [datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.224529] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167/OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167.vmdk to [datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk. {{(pid=62914) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 910.224839] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-75e0e328-efc4-49c5-b2f8-544193c41588 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.238097] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 910.238097] env[62914]: value = "task-4832108" [ 910.238097] env[62914]: _type = "Task" [ 910.238097] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.247675] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.314090] env[62914]: DEBUG nova.network.neutron [-] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.414232] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.477878] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c2f11c6-408c-455d-8b0f-a7c901067965 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.488701] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3f5fea-89ea-4422-8918-62097c3b6a8b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.539709] env[62914]: DEBUG nova.compute.manager [req-cad25cd9-9bf8-476d-8e94-003f6018d691 req-9037bbcb-9d4f-4f65-8f4c-493d2c4610ff service nova] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Detach interface failed, port_id=458d38ce-bc0b-471c-a588-9d31e99cbe74, reason: Instance 1fb67ac1-c0b7-48b9-8562-d457d46709bc could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 910.634729] env[62914]: DEBUG nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 910.708352] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832105, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.761576] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.816523] env[62914]: INFO nova.compute.manager [-] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Took 1.36 seconds to deallocate network for instance. [ 910.871887] env[62914]: DEBUG nova.network.neutron [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Updated VIF entry in instance network info cache for port 4ca49936-a41c-4418-a42a-114ca4faa7c4. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 910.872434] env[62914]: DEBUG nova.network.neutron [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Updating instance_info_cache with network_info: [{"id": "4ca49936-a41c-4418-a42a-114ca4faa7c4", "address": "fa:16:3e:81:aa:bf", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ca49936-a4", "ovs_interfaceid": "4ca49936-a41c-4418-a42a-114ca4faa7c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.918842] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.154120] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231ebb84-1b37-4387-9c9a-7be26ee817c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.165966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cb179e-e1bd-485e-8aba-ccd2d05818c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.204920] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2b7d09-280f-40a3-8502-767ada757019 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.217299] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832105, 'name': CreateVM_Task, 'duration_secs': 1.051857} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.219909] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 911.220761] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.221047] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.221392] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 911.222665] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2972f1eb-6dc1-4815-8c1a-7cdfb5d13e80 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.226986] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da033bbc-2065-477c-9dc5-8f936b4b95ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.243238] env[62914]: DEBUG nova.compute.provider_tree [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.248620] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 911.248620] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cc0cc3-2070-bf8b-9da5-a71f108a872a" [ 911.248620] env[62914]: _type = "Task" [ 911.248620] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.258097] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task} progress is 32%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.265129] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cc0cc3-2070-bf8b-9da5-a71f108a872a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.324698] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.375831] env[62914]: DEBUG oslo_concurrency.lockutils [req-92165794-3475-427d-9b8d-4fc66f8d203b req-0526e03e-cb84-4ff8-aaaf-30a3f3a296ce service nova] Releasing lock "refresh_cache-55192659-4d65-4e74-a47f-46d650b6b095" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.419085] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.440236] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 911.440736] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941983', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'name': 'volume-a229af3a-92d4-4eec-8a9d-985189ec319c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '83de3d7c-2308-4678-ae90-a30705f6a8c4', 'attached_at': '', 'detached_at': '', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'serial': 'a229af3a-92d4-4eec-8a9d-985189ec319c'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 911.442055] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3239ddff-85a0-43d3-a1b0-2207a14e3352 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.470727] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f6f60a-1c7b-4f27-b7d5-0e3574faa0b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.514354] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] volume-a229af3a-92d4-4eec-8a9d-985189ec319c/volume-a229af3a-92d4-4eec-8a9d-985189ec319c.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.514996] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06d2e1b9-7241-49cb-9a31-d0a6b8d6c3d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.543210] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 911.543210] env[62914]: value = "task-4832109" [ 911.543210] env[62914]: _type = "Task" [ 911.543210] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.556394] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832109, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.659357] env[62914]: DEBUG nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 911.688405] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.688665] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.688920] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.689219] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 911.689387] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.689547] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.689769] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.689937] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.690182] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 
tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.690369] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.690549] env[62914]: DEBUG nova.virt.hardware [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.691623] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac57fb84-01cd-4614-9765-f9c878a2fc65 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.702405] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332ff983-0b09-4053-adbc-164ad1cdbbbe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.719495] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.725452] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Creating folder: Project (581cee873ea54d71b2af00e4d2391853). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 911.725917] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b55ff302-44e3-4139-9eaf-267b4ed2fbfe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.741881] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Created folder: Project (581cee873ea54d71b2af00e4d2391853) in parent group-v941773. [ 911.742182] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Creating folder: Instances. Parent ref: group-v941988. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 911.742482] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4807f096-675c-4731-af5b-311acc52a65b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.751489] env[62914]: DEBUG nova.scheduler.client.report [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.761368] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Created folder: Instances in parent group-v941988. [ 911.761694] env[62914]: DEBUG oslo.service.loopingcall [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.765819] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 911.765946] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cc0cc3-2070-bf8b-9da5-a71f108a872a, 'name': SearchDatastore_Task, 'duration_secs': 0.0985} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.769756] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fa523d1-107f-461f-a099-abbcd739e8f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.782557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.782890] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.783202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.783368] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.783562] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.783914] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.784988] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-094b5928-aac7-4990-887d-3e09bc8fff3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.795823] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.795823] env[62914]: value = "task-4832112" [ 911.795823] env[62914]: _type = "Task" [ 911.795823] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.808500] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.808724] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 911.809819] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832112, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.810058] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-501bdd5b-9e0f-4254-bbca-94d13bb1629f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.818265] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 911.818265] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524f710c-c995-dd13-4938-81b95a536be6" [ 911.818265] env[62914]: _type = "Task" [ 911.818265] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.829481] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524f710c-c995-dd13-4938-81b95a536be6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.921140] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.059511] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832109, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.258063] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.261146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.261818] env[62914]: DEBUG nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 912.264959] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.274s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.265310] env[62914]: DEBUG nova.objects.instance [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lazy-loading 'resources' on Instance uuid c488ba7b-68cc-4876-934f-a11d33fca6ab {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.309226] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832112, 'name': CreateVM_Task, 'duration_secs': 0.421616} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.310173] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 912.310676] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.310900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.311260] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 912.311873] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa30cce4-fae8-4624-89bf-f2fd93f58644 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.325112] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 912.325112] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220f22f-f967-0de2-3d44-2d049b09e272" [ 912.325112] env[62914]: _type = "Task" [ 912.325112] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.334292] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524f710c-c995-dd13-4938-81b95a536be6, 'name': SearchDatastore_Task, 'duration_secs': 0.090832} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.335731] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46e2abc3-46e6-4352-87da-26dc49d62827 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.342621] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220f22f-f967-0de2-3d44-2d049b09e272, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.344365] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 912.344365] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d71b44-1594-807d-0d11-cbde4354a574" [ 912.344365] env[62914]: _type = "Task" [ 912.344365] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.354350] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d71b44-1594-807d-0d11-cbde4354a574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.417112] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.554938] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832109, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.759033] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.769109] env[62914]: DEBUG nova.compute.utils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.770952] env[62914]: DEBUG nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Not allocating networking since 'none' was specified. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 912.839790] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220f22f-f967-0de2-3d44-2d049b09e272, 'name': SearchDatastore_Task, 'duration_secs': 0.089922} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.841985] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.842294] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.842515] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.855099] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d71b44-1594-807d-0d11-cbde4354a574, 'name': SearchDatastore_Task, 'duration_secs': 0.091599} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.858477] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.858944] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 55192659-4d65-4e74-a47f-46d650b6b095/55192659-4d65-4e74-a47f-46d650b6b095.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 912.859494] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.859798] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.860053] 
env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10c6726e-5834-463f-a169-a4dd072ba2fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.862281] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7fa8170-f11b-4f2d-b933-150e7db2be1f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.873384] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 912.873384] env[62914]: value = "task-4832113" [ 912.873384] env[62914]: _type = "Task" [ 912.873384] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.874660] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.874847] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 912.878351] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4751707e-0ff9-4836-82dd-6d37d8299149 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.889754] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.890123] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 912.890123] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5289a460-dbd6-b725-a9e0-c6889bd2e705" [ 912.890123] env[62914]: _type = "Task" [ 912.890123] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.901101] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5289a460-dbd6-b725-a9e0-c6889bd2e705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.922182] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.057452] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832109, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.196860] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808b40bb-4de7-4a84-a23e-4f3504a8cfdb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.204704] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92fed72-3603-454c-be96-7cbffb5d4a74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.236029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc0f196-f147-4417-8da1-6638a3345c22 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.245148] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9043797-94d9-48ba-b7bf-71b29c1bc2c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.266103] env[62914]: DEBUG nova.compute.provider_tree [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.270777] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832108, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.810069} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.271412] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167/OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167.vmdk to [datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk. 
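[Editor's note] The entries around this point record the image-cache promotion pattern visible throughout this log: the imported OSTACK_IMG_* staging disk is moved into devstack-image-cache_base/<image-id>/, the staging location is deleted, and later spawns copy the cached VMDK into each instance's folder (the MoveVirtualDisk_Task / DeleteDatastoreFile_Task / CopyVirtualDisk_Task sequence above and below). The snippet below is only an illustrative sketch of that sequence against the raw vSphere API via oslo.vmware, not Nova's actual helpers; `session` (an oslo_vmware VMwareAPISession), `dc_ref`, and the path arguments are placeholders.

    # Illustrative sketch of the cache-promotion flow recorded in the log above.
    # Assumes an existing oslo_vmware api.VMwareAPISession (`session`) and a
    # datacenter managed-object reference (`dc_ref`); both are placeholders.

    def promote_to_image_cache(session, dc_ref, staging_path, cache_path):
        """Move the imported staging VMDK into the image cache, then clean up."""
        vdm = session.vim.service_content.virtualDiskManager
        fm = session.vim.service_content.fileManager

        # [datastore2] OSTACK_IMG_.../....vmdk
        #   -> [datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk
        move_task = session.invoke_api(
            session.vim, 'MoveVirtualDisk_Task', vdm,
            sourceName=staging_path, sourceDatacenter=dc_ref,
            destName=cache_path, destDatacenter=dc_ref)
        session.wait_for_task(move_task)      # polled as MoveVirtualDisk_Task above

        # "Cleaning up location ..." / DeleteDatastoreFile_Task on the staging dir
        staging_dir = staging_path.rsplit('/', 1)[0]
        delete_task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', fm,
            name=staging_dir, datacenter=dc_ref)
        session.wait_for_task(delete_task)

    def copy_from_cache(session, dc_ref, cache_path, instance_path):
        """Per-instance spawn: copy the cached VMDK into the instance folder."""
        vdm = session.vim.service_content.virtualDiskManager
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=cache_path, sourceDatacenter=dc_ref,
            destName=instance_path, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)      # polled as CopyVirtualDisk_Task above

The cache is keyed by image id and guarded by the oslo_concurrency lockutils locks seen in the log (Acquiring/Releasing lock "[datastore2] devstack-image-cache_base/..."), so concurrent spawns of the same image wait for a single fetch instead of each downloading the disk.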
[ 913.271632] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Cleaning up location [datastore2] OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 913.271809] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_500437d4-87b5-4975-af3a-b2ad4dd38167 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.272104] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80680180-dd96-43db-a870-d93b2ec0e518 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.275520] env[62914]: DEBUG nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 913.280502] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 913.280502] env[62914]: value = "task-4832114" [ 913.280502] env[62914]: _type = "Task" [ 913.280502] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.292607] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.388147] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832113, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.405066] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5289a460-dbd6-b725-a9e0-c6889bd2e705, 'name': SearchDatastore_Task, 'duration_secs': 0.028374} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.405340] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78320e79-934e-4a65-bcf1-796eb2e1b3be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.414440] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 913.414440] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a93e04-4bc6-f6a6-aec2-e91fac4760bf" [ 913.414440] env[62914]: _type = "Task" [ 913.414440] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.422335] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.428273] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a93e04-4bc6-f6a6-aec2-e91fac4760bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.558309] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832109, 'name': ReconfigVM_Task, 'duration_secs': 1.819931} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.558701] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Reconfigured VM instance instance-00000049 to attach disk [datastore2] volume-a229af3a-92d4-4eec-8a9d-985189ec319c/volume-a229af3a-92d4-4eec-8a9d-985189ec319c.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.564326] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef7c1ffa-f1f2-4076-bb5b-5489f9988447 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.581896] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 913.581896] env[62914]: value = "task-4832115" [ 913.581896] env[62914]: _type = "Task" [ 913.581896] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.592131] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832115, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.773020] env[62914]: DEBUG nova.scheduler.client.report [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 913.796309] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212381} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.796592] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.796758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.797093] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk to [datastore2] 43227b1e-c90a-47d0-a4f5-fd0af0826e94/43227b1e-c90a-47d0-a4f5-fd0af0826e94.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 913.797393] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46778eb1-17d4-4378-bd8a-ebf45e7f4970 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.806247] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 913.806247] env[62914]: value = "task-4832116" [ 913.806247] env[62914]: _type = "Task" [ 913.806247] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.816054] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.888075] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832113, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696117} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.888075] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 55192659-4d65-4e74-a47f-46d650b6b095/55192659-4d65-4e74-a47f-46d650b6b095.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 913.888234] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.888496] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d1c0e28-4c54-45ce-a3d4-c08ebff12511 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.895845] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 913.895845] env[62914]: value = "task-4832117" [ 913.895845] env[62914]: _type = "Task" [ 913.895845] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.910944] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832117, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.922066] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832107, 'name': CloneVM_Task, 'duration_secs': 3.783833} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.922764] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Created linked-clone VM from snapshot [ 913.923581] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20aff152-1a1a-4424-891c-4ab9f0a8efb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.929974] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a93e04-4bc6-f6a6-aec2-e91fac4760bf, 'name': SearchDatastore_Task, 'duration_secs': 0.079327} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.930719] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.930982] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bdec185e-2af7-4379-8c67-03e125750bb4/bdec185e-2af7-4379-8c67-03e125750bb4.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 913.931292] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da71b428-291f-4989-b5b5-4d97cb4a4a0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.936591] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Uploading image ff487db2-eea6-4a26-b872-6a68c137710a {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 913.944839] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 913.944839] env[62914]: value = "task-4832118" [ 913.944839] env[62914]: _type = "Task" [ 913.944839] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.954855] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832118, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.964557] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 913.964557] env[62914]: value = "vm-941987" [ 913.964557] env[62914]: _type = "VirtualMachine" [ 913.964557] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 913.964886] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a9bcaf6b-a7c2-43de-b1e7-3d8b10668006 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.975051] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lease: (returnval){ [ 913.975051] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5247ba78-7fb6-291c-ac8e-43212cfb179d" [ 913.975051] env[62914]: _type = "HttpNfcLease" [ 913.975051] env[62914]: } obtained for exporting VM: (result){ [ 913.975051] env[62914]: value = "vm-941987" [ 913.975051] env[62914]: _type = "VirtualMachine" [ 913.975051] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 913.975051] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the lease: (returnval){ [ 913.975051] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5247ba78-7fb6-291c-ac8e-43212cfb179d" [ 913.975051] env[62914]: _type = "HttpNfcLease" [ 913.975051] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 913.983038] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 913.983038] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5247ba78-7fb6-291c-ac8e-43212cfb179d" [ 913.983038] env[62914]: _type = "HttpNfcLease" [ 913.983038] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 914.092941] env[62914]: DEBUG oslo_vmware.api [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832115, 'name': ReconfigVM_Task, 'duration_secs': 0.214785} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.093336] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941983', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'name': 'volume-a229af3a-92d4-4eec-8a9d-985189ec319c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '83de3d7c-2308-4678-ae90-a30705f6a8c4', 'attached_at': '', 'detached_at': '', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'serial': 'a229af3a-92d4-4eec-8a9d-985189ec319c'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 914.278668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.281630] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.220s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.283214] env[62914]: INFO nova.compute.claims [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.291744] env[62914]: DEBUG nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 914.315994] env[62914]: INFO nova.scheduler.client.report [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Deleted allocations for instance c488ba7b-68cc-4876-934f-a11d33fca6ab [ 914.323019] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 914.323320] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 914.323494] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.323689] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 914.323844] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.323995] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 914.324234] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 914.324427] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 
tempest-ServerShowV247Test-1836252768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 914.324607] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 914.324777] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 914.325193] env[62914]: DEBUG nova.virt.hardware [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 914.325548] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.326337] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b297ca-7c20-446d-a78f-b94d1edb0236 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.337550] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e6e8d3-62c3-47e0-8e92-d2ac088a3885 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.353557] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.359265] env[62914]: DEBUG oslo.service.loopingcall [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.360289] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 914.360547] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58fec4bf-a4af-4cbf-b07e-15c84494fa70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.379204] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.379204] env[62914]: value = "task-4832120" [ 914.379204] env[62914]: _type = "Task" [ 914.379204] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.388787] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832120, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.405489] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832117, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09425} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.405767] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 914.406567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb21fcc2-edbd-40a5-a22e-0549d58674bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.432753] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 55192659-4d65-4e74-a47f-46d650b6b095/55192659-4d65-4e74-a47f-46d650b6b095.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.433142] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48e848cb-4818-40b9-ba2b-ed6925b91efd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.458021] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832118, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.459543] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 914.459543] env[62914]: value = "task-4832121" [ 914.459543] env[62914]: _type = "Task" [ 914.459543] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.482572] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 914.482572] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5247ba78-7fb6-291c-ac8e-43212cfb179d" [ 914.482572] env[62914]: _type = "HttpNfcLease" [ 914.482572] env[62914]: } is ready. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 914.482890] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 914.482890] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5247ba78-7fb6-291c-ac8e-43212cfb179d" [ 914.482890] env[62914]: _type = "HttpNfcLease" [ 914.482890] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 914.483663] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f567e1-bb84-4c0d-8df0-56f381962ba4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.492050] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52000a17-2898-eb81-5e27-d088cb164cf8/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 914.492265] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52000a17-2898-eb81-5e27-d088cb164cf8/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 914.818822] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.837278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f550be54-8adc-4b86-94d8-ff79f220cb3c tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "c488ba7b-68cc-4876-934f-a11d33fca6ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.002s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.891480] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832120, 'name': CreateVM_Task, 'duration_secs': 0.338797} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.891685] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 914.892198] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.892379] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.892727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 914.893412] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01b6f75c-ab9b-4232-8ca4-1cd8255d46d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.899924] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 914.899924] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523f3a1d-dd96-18a8-563c-bc8245175d92" [ 914.899924] env[62914]: _type = "Task" [ 914.899924] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.910051] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523f3a1d-dd96-18a8-563c-bc8245175d92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.915874] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-122fdcc2-37fb-4402-a51d-a6875c4e147c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.963692] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832118, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.972122] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.148823] env[62914]: DEBUG nova.objects.instance [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'flavor' on Instance uuid 83de3d7c-2308-4678-ae90-a30705f6a8c4 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 915.319109] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.419044] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523f3a1d-dd96-18a8-563c-bc8245175d92, 'name': SearchDatastore_Task, 'duration_secs': 0.140322} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.419044] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.419044] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.419044] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.419044] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.419044] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.419044] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2941c87f-2eb7-47a3-991e-51b55707df99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.443815] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.444052] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 915.444888] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-706d40ce-bd05-4b8a-ac9a-2148ad558942 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.454302] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 915.454302] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b278dc-ab56-8641-55aa-fd48cb04eb7f" [ 915.454302] env[62914]: _type = "Task" [ 915.454302] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.475041] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832118, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.479190] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b278dc-ab56-8641-55aa-fd48cb04eb7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.487388] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832121, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.657574] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2aa430e4-e321-4c77-8922-bf92e4a58052 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.339s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.772630] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7156527-b62b-40ae-a1a4-3dc7e8badac4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.784188] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cb2f3c-23f5-410d-94aa-b00ba8b72a3e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.826277] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48a0e4d-2a54-4586-bf96-cfc7e0e9ae38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.836589] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.840195] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1639c20b-4d6d-4417-b951-78d871adffe8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.860528] env[62914]: DEBUG nova.compute.provider_tree [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.969191] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832118, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.714137} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.972653] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bdec185e-2af7-4379-8c67-03e125750bb4/bdec185e-2af7-4379-8c67-03e125750bb4.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 915.972907] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.973207] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2eb666db-e8b4-413d-9323-85ee07531797 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.979279] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b278dc-ab56-8641-55aa-fd48cb04eb7f, 'name': SearchDatastore_Task, 'duration_secs': 0.104911} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.980456] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b532e5cd-767c-4da2-92cb-cfbfef959950 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.986512] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.988946] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 915.988946] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52754474-8063-8e62-e40e-31804cc9cb10" [ 915.988946] env[62914]: _type = "Task" [ 915.988946] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.989277] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 915.989277] env[62914]: value = "task-4832122" [ 915.989277] env[62914]: _type = "Task" [ 915.989277] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.002501] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52754474-8063-8e62-e40e-31804cc9cb10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.006686] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832122, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.094929] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.095418] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.095764] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "83de3d7c-2308-4678-ae90-a30705f6a8c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.096106] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.096341] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.101130] env[62914]: INFO nova.compute.manager [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Terminating instance [ 916.103883] env[62914]: DEBUG nova.compute.manager [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f 
tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 916.104155] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 916.104433] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91e646e0-e2f7-471f-aff8-c96034ccf275 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.115517] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 916.115517] env[62914]: value = "task-4832123" [ 916.115517] env[62914]: _type = "Task" [ 916.115517] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.128912] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.336567] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.364016] env[62914]: DEBUG nova.scheduler.client.report [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.488448] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.506829] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084143} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.510895] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.511316] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52754474-8063-8e62-e40e-31804cc9cb10, 'name': SearchDatastore_Task, 'duration_secs': 0.095806} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.512258] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8441dd-dc3f-40d8-8857-fdc5a6fa6b02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.515318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.515541] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 916.516172] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8730bd26-9a1c-4f69-906f-fc4718e74bfc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.542156] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] bdec185e-2af7-4379-8c67-03e125750bb4/bdec185e-2af7-4379-8c67-03e125750bb4.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.543969] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7a4b4e9-3096-4698-8f86-8f0389b76ff2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.562678] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 916.562678] env[62914]: value = "task-4832124" [ 916.562678] env[62914]: _type = "Task" [ 916.562678] env[62914]: } to 
complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.569707] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 916.569707] env[62914]: value = "task-4832125" [ 916.569707] env[62914]: _type = "Task" [ 916.569707] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.579252] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.586349] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832125, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.628040] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.833658] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.871629] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.872052] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 916.875244] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.221s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.876179] env[62914]: DEBUG nova.objects.instance [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lazy-loading 'resources' on Instance uuid d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.988870] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.075024] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.086055] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832125, 'name': ReconfigVM_Task, 'duration_secs': 0.352595} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.086505] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Reconfigured VM instance instance-0000004d to attach disk [datastore2] bdec185e-2af7-4379-8c67-03e125750bb4/bdec185e-2af7-4379-8c67-03e125750bb4.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.087404] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40b033f7-0319-48c0-80c8-81639eb84da1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.096689] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 917.096689] env[62914]: value = "task-4832126" [ 917.096689] env[62914]: _type = "Task" [ 917.096689] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.109267] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832126, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.128316] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832123, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.336866] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832116, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.257388} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.337260] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8d584922-9f5f-403e-b8e7-e412d68ca5ee/8d584922-9f5f-403e-b8e7-e412d68ca5ee.vmdk to [datastore2] 43227b1e-c90a-47d0-a4f5-fd0af0826e94/43227b1e-c90a-47d0-a4f5-fd0af0826e94.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 917.338527] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433c5dc5-0faa-40b5-8951-bc3cc9f71b02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.372462] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 43227b1e-c90a-47d0-a4f5-fd0af0826e94/43227b1e-c90a-47d0-a4f5-fd0af0826e94.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.373146] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e991905b-00d4-4e11-a593-a1035f344c8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.390809] env[62914]: DEBUG nova.compute.utils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 917.396238] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 917.397609] env[62914]: DEBUG nova.network.neutron [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 917.405952] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 917.405952] env[62914]: value = "task-4832127" [ 917.405952] env[62914]: _type = "Task" [ 917.405952] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.416668] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832127, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.447189] env[62914]: DEBUG nova.policy [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 917.484488] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "29a177e4-b5d7-4249-8fc5-2316f6891536" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.484839] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.485154] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "29a177e4-b5d7-4249-8fc5-2316f6891536-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.485473] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock 
"29a177e4-b5d7-4249-8fc5-2316f6891536-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.485716] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.493820] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832121, 'name': ReconfigVM_Task, 'duration_secs': 2.746569} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.493820] env[62914]: INFO nova.compute.manager [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Terminating instance [ 917.494896] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 55192659-4d65-4e74-a47f-46d650b6b095/55192659-4d65-4e74-a47f-46d650b6b095.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.498228] env[62914]: DEBUG nova.compute.manager [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 917.498458] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 917.498798] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8eb294d1-f590-43b6-b212-1c7676aef0a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.501351] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d1c625-f2b5-4c46-a7ae-15008b5bec6d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.512142] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 917.514355] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52f0efec-e165-410d-846d-7cae5b15d12a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.516014] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 917.516014] env[62914]: value = "task-4832128" [ 917.516014] env[62914]: _type = "Task" [ 917.516014] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.525896] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 917.525896] env[62914]: value = "task-4832129" [ 917.525896] env[62914]: _type = "Task" [ 917.525896] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.534521] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832128, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.544258] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.576170] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832124, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.609716] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832126, 'name': Rename_Task, 'duration_secs': 0.259478} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.610117] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 917.610405] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-628465e3-e63a-4555-9610-68c38a9986b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.626283] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 917.626283] env[62914]: value = "task-4832130" [ 917.626283] env[62914]: _type = "Task" [ 917.626283] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.642032] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832123, 'name': PowerOffVM_Task, 'duration_secs': 1.074081} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.643212] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 917.643212] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Volume detach. 
Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 917.644060] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941983', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'name': 'volume-a229af3a-92d4-4eec-8a9d-985189ec319c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '83de3d7c-2308-4678-ae90-a30705f6a8c4', 'attached_at': '', 'detached_at': '', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'serial': 'a229af3a-92d4-4eec-8a9d-985189ec319c'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 917.644502] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b447af0f-ca0a-4642-93eb-5d9d8f44e914 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.651795] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832130, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.681772] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e119187c-0772-4f28-8644-7280b4850180 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.691259] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18022c3-e0e1-4ce7-91a8-c7c0ac743f91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.718063] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19decd8e-07a3-496b-8c49-43a7ea7fbf3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.738061] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] The volume has not been displaced from its original location: [datastore2] volume-a229af3a-92d4-4eec-8a9d-985189ec319c/volume-a229af3a-92d4-4eec-8a9d-985189ec319c.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 917.743982] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 917.747170] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-037d74d7-7863-4595-8a9a-adf3f46c0214 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.768546] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 917.768546] env[62914]: value = "task-4832131" [ 917.768546] env[62914]: _type = "Task" [ 917.768546] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.779823] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832131, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.848045] env[62914]: DEBUG nova.network.neutron [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Successfully created port: 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.897416] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 917.921011] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832127, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.965774] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e001617c-1165-4567-b584-13ee1eec7da5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.976547] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6f8a03-a4ba-412d-be0a-120dc0469d6e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.011815] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efef3158-8dce-47ca-84a6-fa263d49ec1c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.027513] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf4fedd-245c-406a-9aa4-dcbd533a4e9f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.040232] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832128, 'name': Rename_Task, 'duration_secs': 0.313215} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.051440] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 918.052147] env[62914]: DEBUG nova.compute.provider_tree [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.056575] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-256dbcb1-5681-4fe0-bdad-3887e5a61a18 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.058442] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.064901] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 918.064901] env[62914]: value = "task-4832132" [ 918.064901] env[62914]: _type = "Task" [ 918.064901] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.082618] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832124, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.13384} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.087040] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 918.087334] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.088116] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832132, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.088451] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d84ab608-e6a6-4fc3-844c-76c4b8e9abf2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.095883] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 918.095883] env[62914]: value = "task-4832133" [ 918.095883] env[62914]: _type = "Task" [ 918.095883] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.106205] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832133, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.145507] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832130, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.281353] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832131, 'name': ReconfigVM_Task, 'duration_secs': 0.478722} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.281353] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 918.286592] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9cb72ae-1eca-424e-8c2b-4ce62e6fac67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.308278] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 918.308278] env[62914]: value = "task-4832134" [ 918.308278] env[62914]: _type = "Task" [ 918.308278] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.318898] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832134, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.418464] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832127, 'name': ReconfigVM_Task, 'duration_secs': 0.805518} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.418953] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 43227b1e-c90a-47d0-a4f5-fd0af0826e94/43227b1e-c90a-47d0-a4f5-fd0af0826e94.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.419722] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ff38805-71c6-4a3f-89bc-95fd1cdb7366 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.428198] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 918.428198] env[62914]: value = "task-4832135" [ 918.428198] env[62914]: _type = "Task" [ 918.428198] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.438019] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832135, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.542704] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832129, 'name': PowerOffVM_Task, 'duration_secs': 0.71262} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.543335] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 918.543442] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 918.543793] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b495649-ad37-4dc7-96a6-adcd0b3e86df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.560988] env[62914]: DEBUG nova.scheduler.client.report [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.577498] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832132, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.606999] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832133, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076444} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.607423] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.608476] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88af4fed-d703-4804-855c-6943d40143c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.637178] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.638228] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49e0510c-39c8-4f11-b46f-f93c042921dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.673570] env[62914]: DEBUG oslo_vmware.api [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832130, 'name': PowerOnVM_Task, 'duration_secs': 0.719302} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.675258] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 918.675948] env[62914]: INFO nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Took 7.02 seconds to spawn the instance on the hypervisor. [ 918.675948] env[62914]: DEBUG nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 918.676183] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 918.676183] env[62914]: value = "task-4832137" [ 918.676183] env[62914]: _type = "Task" [ 918.676183] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.676892] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df4b25d-b471-48a5-835c-8ae01012eb2e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.696454] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832137, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.820451] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832134, 'name': ReconfigVM_Task, 'duration_secs': 0.366715} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.820841] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941983', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'name': 'volume-a229af3a-92d4-4eec-8a9d-985189ec319c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '83de3d7c-2308-4678-ae90-a30705f6a8c4', 'attached_at': '', 'detached_at': '', 'volume_id': 'a229af3a-92d4-4eec-8a9d-985189ec319c', 'serial': 'a229af3a-92d4-4eec-8a9d-985189ec319c'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 918.821168] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 918.821993] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402ea382-f495-4555-86f0-01089e9f8dfe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.830216] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 918.830998] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6924f227-5e5f-4ed2-a5e3-592c7fc06a15 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.908865] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 918.937991] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 918.938288] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 918.938435] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.938636] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 918.938821] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.938979] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 918.939211] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 918.939374] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 918.939546] env[62914]: DEBUG 
nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 918.939716] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 918.939893] env[62914]: DEBUG nova.virt.hardware [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 918.940964] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70dccf7-bc87-4671-9914-eac454830b8f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.947286] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832135, 'name': Rename_Task, 'duration_secs': 0.265313} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.948143] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 918.948635] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2344a2c5-8989-47a9-b19f-6a447c120101 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.953945] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e51b55-efbf-4c07-9d1f-d546d517271f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.959233] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 918.959233] env[62914]: value = "task-4832139" [ 918.959233] env[62914]: _type = "Task" [ 918.959233] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.975621] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832139, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.068506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.071614] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.299s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.071614] env[62914]: DEBUG nova.objects.instance [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'resources' on Instance uuid dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.084725] env[62914]: DEBUG oslo_vmware.api [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832132, 'name': PowerOnVM_Task, 'duration_secs': 0.821823} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.085791] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 919.086110] env[62914]: INFO nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Took 12.17 seconds to spawn the instance on the hypervisor. [ 919.086330] env[62914]: DEBUG nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 919.087497] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cf4736-9c6f-4eda-bd09-2aeefef06631 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.099261] env[62914]: INFO nova.scheduler.client.report [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Deleted allocations for instance d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f [ 919.191237] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832137, 'name': ReconfigVM_Task, 'duration_secs': 0.320548} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.191555] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.192250] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64633d61-f77a-432c-a219-104d367ec523 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.202937] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 919.202937] env[62914]: value = "task-4832140" [ 919.202937] env[62914]: _type = "Task" [ 919.202937] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.208294] env[62914]: INFO nova.compute.manager [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Took 44.37 seconds to build instance. [ 919.215875] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832140, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.337385] env[62914]: DEBUG nova.compute.manager [req-a9cb4bea-8843-421a-b6fb-b71ad9046cb6 req-69b4effc-b865-4345-a513-d30d3fdcaf76 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-vif-plugged-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 919.337517] env[62914]: DEBUG oslo_concurrency.lockutils [req-a9cb4bea-8843-421a-b6fb-b71ad9046cb6 req-69b4effc-b865-4345-a513-d30d3fdcaf76 service nova] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.338085] env[62914]: DEBUG oslo_concurrency.lockutils [req-a9cb4bea-8843-421a-b6fb-b71ad9046cb6 req-69b4effc-b865-4345-a513-d30d3fdcaf76 service nova] Lock "aedc785f-619f-4b9f-850f-790f84e57577-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.338301] env[62914]: DEBUG oslo_concurrency.lockutils [req-a9cb4bea-8843-421a-b6fb-b71ad9046cb6 req-69b4effc-b865-4345-a513-d30d3fdcaf76 service nova] Lock "aedc785f-619f-4b9f-850f-790f84e57577-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.338540] env[62914]: DEBUG nova.compute.manager [req-a9cb4bea-8843-421a-b6fb-b71ad9046cb6 req-69b4effc-b865-4345-a513-d30d3fdcaf76 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] No waiting events found dispatching network-vif-plugged-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 919.338892] env[62914]: WARNING nova.compute.manager [req-a9cb4bea-8843-421a-b6fb-b71ad9046cb6 req-69b4effc-b865-4345-a513-d30d3fdcaf76 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received unexpected event network-vif-plugged-24c487f8-b730-47b7-8817-5b3894271b0f for instance with vm_state building and task_state spawning. [ 919.444691] env[62914]: DEBUG nova.network.neutron [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Successfully updated port: 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.472494] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832139, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.577642] env[62914]: DEBUG nova.objects.instance [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'numa_topology' on Instance uuid dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.614194] env[62914]: INFO nova.compute.manager [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Took 46.18 seconds to build instance. [ 919.614700] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3723296-b128-4fe9-bd04-421a95937481 tempest-ServerMetadataTestJSON-819378476 tempest-ServerMetadataTestJSON-819378476-project-member] Lock "d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.064s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.690144] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 919.690553] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 919.690830] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Deleting the datastore file [datastore1] 29a177e4-b5d7-4249-8fc5-2316f6891536 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.691199] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c4c5833-65da-4ae2-a15f-34baafe0fa26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.701546] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for the task: (returnval){ [ 919.701546] env[62914]: value = "task-4832141" [ 919.701546] env[62914]: _type = "Task" [ 919.701546] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.711237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d7b8fd7-e45c-4497-b0b3-ab6543ec01f5 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "bdec185e-2af7-4379-8c67-03e125750bb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.068s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.716349] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.720121] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832140, 'name': Rename_Task, 'duration_secs': 0.166175} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.720439] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 919.720676] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-685aafae-e6ac-481b-8985-e765f2cf4e42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.727969] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 919.727969] env[62914]: value = "task-4832142" [ 919.727969] env[62914]: _type = "Task" [ 919.727969] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.739162] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832142, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.947536] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.947693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.947854] env[62914]: DEBUG nova.network.neutron [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 919.972519] env[62914]: DEBUG oslo_vmware.api [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832139, 'name': PowerOnVM_Task, 'duration_secs': 0.958445} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.972873] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 919.973927] env[62914]: INFO nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Took 17.86 seconds to spawn the instance on the hypervisor. 
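The entries above repeat the task pattern that runs through this whole log: a vCenter `*_Task` method (Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, ...) is invoked, then `wait_for_task`/`_poll_task` in oslo_vmware/api.py polls the task's progress until it reaches a terminal state. The snippet below is only a self-contained illustration of that polling loop, not the oslo.vmware implementation; `FakeTask`, the step count, and the 0.5 s interval are invented for the example.

```python
# Self-contained illustration of the polling pattern in the entries above:
# a vCenter *_Task method is invoked, then wait_for_task/_poll_task checks
# progress until the task reaches a terminal state. This is a sketch, not the
# oslo.vmware code; FakeTask, the step count and the 0.5 s interval are made up.
import time

class FakeTask:
    """Stands in for a vCenter task; real code reads TaskInfo via the vSphere API."""
    def __init__(self, steps=3):
        self._step = 100 // steps
        self.progress = 0
        self.state = 'running'

    def poll(self):
        # Each poll advances progress, mimicking the "progress is N%" log lines.
        self.progress = min(100, self.progress + self._step)
        if self.progress >= 100:
            self.state = 'success'
        return self.state, self.progress

def wait_for_task(task, poll_interval=0.5):
    """Block until the task succeeds; raise if it reports an error."""
    while True:
        state, progress = task.poll()
        print(f"Task progress is {progress}%")
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError("task failed")
        time.sleep(poll_interval)

wait_for_task(FakeTask())
```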
[ 919.973927] env[62914]: DEBUG nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 919.974745] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0907561-66fa-4e5a-9769-dbd5f2c5a373 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.007404] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 920.007607] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 920.008325] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleting the datastore file [datastore2] 83de3d7c-2308-4678-ae90-a30705f6a8c4 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 920.008325] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4d386b9-1281-40bf-9801-6a5bfc67b527 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.017446] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 920.017446] env[62914]: value = "task-4832143" [ 920.017446] env[62914]: _type = "Task" [ 920.017446] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.029676] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832143, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.081807] env[62914]: DEBUG nova.objects.base [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 920.118557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7012f74-0446-470e-92cc-25ca3bee07c9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "55192659-4d65-4e74-a47f-46d650b6b095" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.917s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.213160] env[62914]: DEBUG oslo_vmware.api [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Task: {'id': task-4832141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.502657} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.213524] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.213642] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 920.213828] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 920.214017] env[62914]: INFO nova.compute.manager [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Took 2.72 seconds to destroy the instance on the hypervisor. [ 920.214297] env[62914]: DEBUG oslo.service.loopingcall [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.217501] env[62914]: DEBUG nova.compute.manager [-] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 920.217603] env[62914]: DEBUG nova.network.neutron [-] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.239872] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832142, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.502282] env[62914]: INFO nova.compute.manager [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Took 50.43 seconds to build instance. [ 920.533088] env[62914]: DEBUG oslo_vmware.api [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832143, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308816} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.535862] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.535862] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 920.535862] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 920.535862] env[62914]: INFO nova.compute.manager [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Took 4.43 seconds to destroy the instance on the hypervisor. [ 920.535862] env[62914]: DEBUG oslo.service.loopingcall [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.535862] env[62914]: DEBUG nova.compute.manager [-] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 920.535862] env[62914]: DEBUG nova.network.neutron [-] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.545424] env[62914]: DEBUG nova.network.neutron [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 920.585903] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7df303-cd3d-4d42-b84d-25fa57277eb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.599619] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add41a83-0c11-4412-ace3-e91d675cdd22 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.643894] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fe7e54-eb03-493b-9f22-c411ecae09e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.655459] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d2fa1b-a023-4fd2-b480-62bc8b2671c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.672670] env[62914]: DEBUG nova.compute.provider_tree [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.740857] env[62914]: DEBUG oslo_vmware.api [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832142, 'name': PowerOnVM_Task, 'duration_secs': 0.690781} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.741454] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 920.741692] env[62914]: INFO nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Took 6.45 seconds to spawn the instance on the hypervisor. 
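The destroy records above (for instances 29a177e4-b5d7-4249-8fc5-2316f6891536 and 83de3d7c-2308-4678-ae90-a30705f6a8c4) follow the same order each time: power off, unregister the VM, delete its datastore files, then deallocate the network in the compute manager. The stub below is only an outline of that ordering as read from the log; the print functions stand in for the real calls in nova.virt.vmwareapi.vmops and nova.compute.manager.

```python
# Outline only: the per-instance teardown order as read from the records above
# (power off, unregister, delete the datastore files, deallocate the network).
# The step functions are print stubs; the real logic lives in
# nova.virt.vmwareapi.vmops and nova.compute.manager, not here.
def power_off(instance):
    print(f"[instance: {instance}] Powered off the VM")

def unregister(instance):
    print(f"[instance: {instance}] Unregistered the VM")

def delete_datastore_files(instance):
    # Corresponds to the FileManager.DeleteDatastoreFile_Task invocations.
    print(f"[instance: {instance}] Deleted contents of the VM from the datastore")

def deallocate_network(instance):
    # Driven by the compute manager after the hypervisor-level destroy.
    print(f"[instance: {instance}] Deallocating network for instance")

def destroy_instance(instance):
    power_off(instance)
    unregister(instance)
    delete_datastore_files(instance)
    deallocate_network(instance)

destroy_instance("29a177e4-b5d7-4249-8fc5-2316f6891536")
```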
[ 920.742018] env[62914]: DEBUG nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 920.742857] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3773affa-756c-4b45-91cd-d78978063cd9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.006174] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c28771bc-9cdd-4531-add3-36defb81a38e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.071s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.007417] env[62914]: DEBUG nova.network.neutron [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.015685] env[62914]: DEBUG nova.network.neutron [-] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.176746] env[62914]: DEBUG nova.scheduler.client.report [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.264269] env[62914]: INFO nova.compute.manager [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Took 38.22 seconds to build instance. [ 921.381219] env[62914]: DEBUG nova.network.neutron [-] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.513815] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.514183] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Instance network_info: |[{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 921.514703] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:78:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24c487f8-b730-47b7-8817-5b3894271b0f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.526201] env[62914]: DEBUG oslo.service.loopingcall [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 921.526580] env[62914]: INFO nova.compute.manager [-] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Took 1.31 seconds to deallocate network for instance. [ 921.526851] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 921.528993] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e47f3d5f-ac13-4d02-b9b9-26d2609addc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.560639] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.560639] env[62914]: value = "task-4832144" [ 921.560639] env[62914]: _type = "Task" [ 921.560639] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.569099] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832144, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.684725] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.611s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.686909] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.577s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.690429] env[62914]: INFO nova.compute.claims [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.727933] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.728836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.728982] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock 
"43227b1e-c90a-47d0-a4f5-fd0af0826e94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.729562] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.729562] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.742858] env[62914]: INFO nova.compute.manager [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Terminating instance [ 921.745086] env[62914]: DEBUG nova.compute.manager [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 921.746068] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 921.749346] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673af40a-51b2-4bda-a1e6-4b8f927badf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.764088] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 921.764903] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10fedd9a-ee1f-4128-b810-b2f9a41f1fb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.774569] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b71393a-d7d1-4dc5-8e44-18363a6d16a6 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "769c3873-7480-47de-894b-40dbf3f2f7f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.758s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.777548] env[62914]: DEBUG nova.compute.manager [req-8db50fbf-4832-472b-bc22-5f9aa42336bd 
req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 921.777935] env[62914]: DEBUG nova.compute.manager [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing instance network info cache due to event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 921.779280] env[62914]: DEBUG oslo_concurrency.lockutils [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.779280] env[62914]: DEBUG oslo_concurrency.lockutils [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.779280] env[62914]: DEBUG nova.network.neutron [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 921.792245] env[62914]: DEBUG oslo_vmware.api [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 921.792245] env[62914]: value = "task-4832145" [ 921.792245] env[62914]: _type = "Task" [ 921.792245] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.806065] env[62914]: DEBUG oslo_vmware.api [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.887406] env[62914]: INFO nova.compute.manager [-] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Took 1.35 seconds to deallocate network for instance. 
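The network info cache updates above for port 24c487f8-b730-47b7-8817-5b3894271b0f log a full `network_info` structure. As a small illustration of reading that structure, the snippet below keeps only the fields it touches from the logged entry; it is plain dictionary traversal over the same shape, not Neutron or Nova code.

```python
# Plain-dict traversal over the network_info shape shown in the cache updates
# above; fields are trimmed to the ones this snippet reads. Illustration only,
# not Neutron or Nova code.
network_info = [{
    "id": "24c487f8-b730-47b7-8817-5b3894271b0f",
    "address": "fa:16:3e:d2:78:70",
    "network": {
        "id": "9be47f79-b984-4fc2-a590-a80f36132ab1",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    # Port UUID, MAC address, and fixed IPs, matching the logged entry.
    print(vif["id"], vif["address"], fixed_ips)
```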
[ 921.905291] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "d9476d24-fbc5-4e30-bf67-85c388e943fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.905945] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.052829] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.071944] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832144, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.193013] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ccd9e18b-6f09-4b90-b180-29182b87af5c tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 60.282s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.194532] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 27.341s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.194695] env[62914]: INFO nova.compute.manager [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Unshelving [ 922.301122] env[62914]: DEBUG oslo_vmware.api [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832145, 'name': PowerOffVM_Task, 'duration_secs': 0.249082} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.302033] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 922.302033] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 922.303161] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f01b5b7f-647b-4c77-a552-0b1d0140b214 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.411192] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 922.415264] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 922.416533] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 922.418120] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleting the datastore file [datastore2] 43227b1e-c90a-47d0-a4f5-fd0af0826e94 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.426790] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a1ccdaf-cea8-4af3-adba-c019a230e65d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.435320] env[62914]: DEBUG oslo_vmware.api [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 922.435320] env[62914]: value = "task-4832147" [ 922.435320] env[62914]: _type = "Task" [ 922.435320] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.446712] env[62914]: DEBUG oslo_vmware.api [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832147, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.474261] env[62914]: INFO nova.compute.manager [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Took 0.59 seconds to detach 1 volumes for instance. [ 922.509231] env[62914]: INFO nova.compute.manager [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Rebuilding instance [ 922.568640] env[62914]: DEBUG nova.compute.manager [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 922.569681] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b224d9-6cc2-4670-8c5d-89094e205a9e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.591393] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832144, 'name': CreateVM_Task, 'duration_secs': 0.926232} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.591915] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 922.592681] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.592859] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.593231] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.593514] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb655ca2-7081-49ca-8d52-a531bf4a10af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.601655] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 922.601655] env[62914]: value = 
"session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258a884-a6a9-bb49-6d5b-9666ef2f04f5" [ 922.601655] env[62914]: _type = "Task" [ 922.601655] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.610848] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258a884-a6a9-bb49-6d5b-9666ef2f04f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.932587] env[62914]: DEBUG nova.network.neutron [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updated VIF entry in instance network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 922.933076] env[62914]: DEBUG nova.network.neutron [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.938852] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.948571] env[62914]: DEBUG oslo_vmware.api [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365909} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.948571] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.948802] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 922.948984] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 922.949467] env[62914]: INFO nova.compute.manager [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Took 1.20 seconds to destroy the instance on the hypervisor. [ 922.949467] env[62914]: DEBUG oslo.service.loopingcall [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.950285] env[62914]: DEBUG nova.compute.manager [-] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 922.950382] env[62914]: DEBUG nova.network.neutron [-] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 922.983984] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.093579] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 923.096863] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04c97c64-d852-4e62-8a66-e9a4c09a8806 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.109787] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 923.109787] env[62914]: value = "task-4832148" [ 923.109787] 
env[62914]: _type = "Task" [ 923.109787] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.122265] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258a884-a6a9-bb49-6d5b-9666ef2f04f5, 'name': SearchDatastore_Task, 'duration_secs': 0.035779} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.124613] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.124953] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.127032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.127032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.127032] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.127032] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e378af3c-7753-43e7-9482-35fc224df39f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.132351] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.143487] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.143730] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 923.144549] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2669718f-61b1-458c-869e-9f4583961a17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.151709] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 923.151709] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5293d712-9565-6765-518c-4a7919eb555d" [ 923.151709] env[62914]: _type = "Task" [ 923.151709] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.162716] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5293d712-9565-6765-518c-4a7919eb555d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.212905] env[62914]: DEBUG nova.compute.utils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 923.280464] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339d1aec-cec5-4044-bf43-c2f25d173838 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.289108] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e9b887-2135-4ff4-8bd9-26e6158e530d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.323711] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd464f7d-b68f-4689-a6ae-33c2d1226eba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.332473] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb1e374-9062-4378-9f29-f3247712ba28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.347423] env[62914]: DEBUG nova.compute.provider_tree [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.437385] env[62914]: DEBUG oslo_concurrency.lockutils [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.437685] env[62914]: DEBUG nova.compute.manager [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Received event network-vif-deleted-c68776d2-73ad-4ec2-b114-31f5878098d8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 923.437888] env[62914]: DEBUG nova.compute.manager [req-8db50fbf-4832-472b-bc22-5f9aa42336bd req-2dda4c57-e208-40e6-b52b-97fffa3e7044 service nova] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Received event network-vif-deleted-9be7ced3-94cd-47b2-8902-92429978705c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 923.561023] env[62914]: DEBUG nova.compute.manager [req-0badaa3b-05d3-4d11-9634-29815f01fca8 req-8b125a73-90ec-4f9e-a4f3-6d7ecdddd2ac service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Received event network-vif-deleted-ae4f4a6c-15e7-46f4-8edd-c415125f941c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 923.561628] env[62914]: INFO nova.compute.manager [req-0badaa3b-05d3-4d11-9634-29815f01fca8 req-8b125a73-90ec-4f9e-a4f3-6d7ecdddd2ac service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Neutron deleted interface ae4f4a6c-15e7-46f4-8edd-c415125f941c; 
detaching it from the instance and deleting it from the info cache [ 923.561860] env[62914]: DEBUG nova.network.neutron [req-0badaa3b-05d3-4d11-9634-29815f01fca8 req-8b125a73-90ec-4f9e-a4f3-6d7ecdddd2ac service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.620307] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832148, 'name': PowerOffVM_Task, 'duration_secs': 0.153372} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.620612] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 923.620887] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 923.621836] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bb128e-335a-4a02-9101-7659e09099dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.630652] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 923.631228] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a463d14-b3a1-4059-ae1d-d17e76adf199 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.660179] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 923.660450] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 923.660653] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleting the datastore file [datastore2] 769c3873-7480-47de-894b-40dbf3f2f7f0 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.664829] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29567ecc-820c-4ada-9cc1-389a45e47537 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.666813] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5293d712-9565-6765-518c-4a7919eb555d, 'name': SearchDatastore_Task, 'duration_secs': 0.032363} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.668121] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ac790f7-6a21-4509-90be-3a4b41f046af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.672639] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 923.672639] env[62914]: value = "task-4832150" [ 923.672639] env[62914]: _type = "Task" [ 923.672639] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.677323] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 923.677323] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f9dd52-cae1-720b-5977-0c230bc4904d" [ 923.677323] env[62914]: _type = "Task" [ 923.677323] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.684379] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.689140] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f9dd52-cae1-720b-5977-0c230bc4904d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.717064] env[62914]: INFO nova.virt.block_device [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Booting with volume 37c44301-e13e-475c-b93b-3d45c6886107 at /dev/sdb [ 923.752531] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1d3e07f-3996-4b0e-bf0d-7680149730d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.762197] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9177fd-feb6-4178-989c-2b6d9b53887b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.801305] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb15d0f9-671f-47f4-bd58-1dda5d18ef7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.811072] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0612addb-e336-4253-91ff-cc2b0d16e7f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.849406] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54064b10-5af6-4696-b3d9-445b88e83c46 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.853059] env[62914]: DEBUG nova.scheduler.client.report [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.861349] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159ab911-7cc9-49eb-9575-8c11519d0206 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.876949] env[62914]: DEBUG nova.virt.block_device [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating existing volume attachment record: dd89dc52-363b-48f4-a04a-7905a252c8ed {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 923.983825] env[62914]: DEBUG nova.network.neutron [-] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.064603] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-56b221eb-1fc4-443c-9d91-3b313f553b45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.074925] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f14ef6-eb61-459d-ae88-dd271f7f3c07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.115828] env[62914]: DEBUG nova.compute.manager [req-0badaa3b-05d3-4d11-9634-29815f01fca8 req-8b125a73-90ec-4f9e-a4f3-6d7ecdddd2ac service nova] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Detach interface failed, port_id=ae4f4a6c-15e7-46f4-8edd-c415125f941c, reason: Instance 43227b1e-c90a-47d0-a4f5-fd0af0826e94 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 924.185753] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369387} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.186481] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.186696] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 924.186896] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 924.193281] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f9dd52-cae1-720b-5977-0c230bc4904d, 'name': SearchDatastore_Task, 'duration_secs': 0.033835} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.193897] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.194193] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] aedc785f-619f-4b9f-850f-790f84e57577/aedc785f-619f-4b9f-850f-790f84e57577.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 924.194481] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6c84237-3214-4112-82fd-0b2e5cfcb0b5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.201385] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 924.201385] env[62914]: value = "task-4832154" [ 924.201385] env[62914]: _type = "Task" [ 924.201385] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.211381] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832154, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.358241] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.359052] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Start building networks asynchronously for instance. 
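The lockutils entries report both how long a caller waited for a named lock and how long it was then held (here "compute_resources" was held 2.671s by instance_claim). The sketch below reproduces that wait/held instrumentation around a plain threading.Lock; it is illustrative only and not the oslo.concurrency implementation.

import contextlib
import logging
import threading
import time

LOG = logging.getLogger(__name__)
_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name):
    """Acquire a named lock, logging wait and hold times like the lines above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    LOG.debug('Lock "%s" acquired :: waited %.3fs', name, time.monotonic() - start)
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" released :: held %.3fs',
                  name, time.monotonic() - held_from)

# usage, mirroring the "compute_resources" lock in the log:
# with timed_lock("compute_resources"):
#     ...claim or update resource usage...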
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 924.363852] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.879s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.363852] env[62914]: DEBUG nova.objects.instance [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lazy-loading 'resources' on Instance uuid aede8da7-8bf2-4963-b08b-6e06007614a5 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.487036] env[62914]: INFO nova.compute.manager [-] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Took 1.54 seconds to deallocate network for instance. [ 924.712159] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832154, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.867210] env[62914]: DEBUG nova.compute.utils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 924.872931] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Allocating IP information in the background. 
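"Using /dev/sd instead of None" is the device-name helper falling back to the /dev/sd prefix when no device was requested and then picking the next free letter after the devices already attached, which is how the boot volume above lands on /dev/sdb. A rough standalone sketch of that letter selection (single trailing letters only, not Nova's full get_next_device_name):

import string

def next_device_name(attached, prefix="/dev/sd"):
    """Return the next free device path, e.g. /dev/sdb when /dev/sda is taken.

    `attached` is an iterable of device paths already in use.
    """
    used = {dev[-1] for dev in attached if dev and dev.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError("no free device names left under " + prefix)

# with the root disk on /dev/sda, the boot volume goes to /dev/sdb:
assert next_device_name(["/dev/sda"]) == "/dev/sdb"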
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 924.873229] env[62914]: DEBUG nova.network.neutron [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 924.956382] env[62914]: DEBUG nova.policy [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a8cfcd0aed9499a83c09052328647cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '894c73ea90624428afeb1165afbbfa9c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 924.994244] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.217653] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832154, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.231178] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 925.231503] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 925.231720] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.231973] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 925.232170] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.232332] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 925.232569] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 925.232721] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 925.232897] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 
tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 925.233101] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 925.233291] env[62914]: DEBUG nova.virt.hardware [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 925.234328] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8812222-5543-439a-b89c-8d68e20545c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.246959] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce1a6f3-5110-4c80-b3b5-7291baedf695 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.266439] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 925.272534] env[62914]: DEBUG oslo.service.loopingcall [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.275918] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 925.276576] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4aa6c85d-ee69-425e-9e66-f0e68666b456 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.298791] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 925.298791] env[62914]: value = "task-4832155" [ 925.298791] env[62914]: _type = "Task" [ 925.298791] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.308276] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832155, 'name': CreateVM_Task} progress is 0%. 
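The nova.virt.hardware block walks topology selection: with no flavor or image limits (0:0:0) the caps default to 65536 sockets, cores and threads, and for the 1-vCPU m1.nano flavor the only topology whose product equals the vCPU count is 1:1:1. A simplified enumeration of that idea (the real code also applies preferences and thread constraints):

from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """All (sockets, cores, threads) whose product equals the vCPU count."""
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(Topology(s, c, t))
    return found

# For the m1.nano flavor (vcpus=1) this yields exactly one topology, as logged:
print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]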
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.325404] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb4d060-50c1-4df5-ba95-411deaa0373c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.337202] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc7fc6d-2202-4577-8c45-4dc054a982a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.368178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a1c6ef-898d-4397-9bf2-b7107feefd00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.373143] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 925.380348] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05e25ea-8a4b-437d-8003-5fe719c206be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.396687] env[62914]: DEBUG nova.compute.provider_tree [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.399897] env[62914]: DEBUG nova.network.neutron [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Successfully created port: 94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.718930] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832154, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.811761] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832155, 'name': CreateVM_Task} progress is 99%. 
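"Inventory has not changed in ProviderTree" is the outcome of comparing a freshly computed inventory dict against the cached copy and skipping the placement update when they are equal. A minimal sketch of that guard, ignoring the generations and nested providers the real ProviderTree also tracks:

def update_inventory(cache, provider_uuid, new_inventory):
    """Store inventory for a provider; return True only if it actually changed.

    `cache` maps provider UUID -> inventory dict such as
    {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, ...}
    """
    if cache.get(provider_uuid) == new_inventory:
        # matches the "Inventory has not changed" DEBUG message: nothing to push
        return False
    cache[provider_uuid] = new_inventory
    return True

cache = {}
inv = {"VCPU": {"total": 48, "allocation_ratio": 4.0},
       "MEMORY_MB": {"total": 196590, "reserved": 512}}
assert update_inventory(cache, "f2f7a014-852b-4b37-9610-c5761f4b0175", inv) is True
assert update_inventory(cache, "f2f7a014-852b-4b37-9610-c5761f4b0175", dict(inv)) is False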
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.902427] env[62914]: DEBUG nova.scheduler.client.report [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.215511] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832154, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.309317] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832155, 'name': CreateVM_Task, 'duration_secs': 0.601902} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.309484] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 926.309892] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.310083] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.310482] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 926.310820] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0ed704d-820c-468c-924e-78f513e2a4fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.316039] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 926.316039] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52722227-4dbe-fd24-03e3-06dab8ceb571" [ 926.316039] env[62914]: _type = "Task" [ 
926.316039] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.324909] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52722227-4dbe-fd24-03e3-06dab8ceb571, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.388809] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 926.408218] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.412689] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.105s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.414259] env[62914]: INFO nova.compute.claims [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.425362] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.425362] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.425640] env[62914]: DEBUG 
nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.425772] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.425924] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.426157] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.426888] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.426888] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.427719] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.427719] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.427953] env[62914]: DEBUG nova.virt.hardware [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.429275] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c864b4-7c20-4a5c-bc22-21257c00a0da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.442073] env[62914]: INFO nova.scheduler.client.report [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Deleted allocations for instance 
aede8da7-8bf2-4963-b08b-6e06007614a5 [ 926.445782] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff21e26-9245-4912-81fc-a2a1c6493424 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.717876] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832154, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.081749} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.718173] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] aedc785f-619f-4b9f-850f-790f84e57577/aedc785f-619f-4b9f-850f-790f84e57577.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 926.718551] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.718949] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2370b638-249b-4dda-83cf-c93ae7690ce8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.729887] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 926.729887] env[62914]: value = "task-4832157" [ 926.729887] env[62914]: _type = "Task" [ 926.729887] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.745519] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.828882] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52722227-4dbe-fd24-03e3-06dab8ceb571, 'name': SearchDatastore_Task, 'duration_secs': 0.031814} completed successfully. 
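"Extending root virtual disk to 1048576" is consistent with the extend size being passed in KiB: the m1.nano flavor in this log has root_gb=1, and 1 GiB is 1024 * 1024 = 1048576 KiB. A one-line conversion makes the arithmetic explicit:

def root_disk_capacity_kb(root_gb):
    """Flavor root_gb expressed in KiB, the unit the extend task appears to use here."""
    return root_gb * 1024 * 1024

assert root_disk_capacity_kb(1) == 1048576   # matches the value in the log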
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.829341] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.829664] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.829991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.830221] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.830505] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.831261] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07359333-0d1f-49d4-a297-43dabb3544a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.843562] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.843858] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Folder [datastore1] devstack-image-cache_base created. 
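The acquire/release cycle around "[datastore1] devstack-image-cache_base/75c43660-..." is the usual image-cache pattern: lock on the cached VMDK path, check whether it already exists, and only fetch it when missing, so concurrent boots of the same image do not download it twice. A standalone sketch of that pattern, with a hypothetical fetch callable and plain threading locks in place of lockutils:

import os
import threading

_cache_locks = {}
_guard = threading.Lock()

def _lock_for(path):
    with _guard:
        return _cache_locks.setdefault(path, threading.Lock())

def ensure_cached(cache_path, fetch):
    """Fetch an image into the cache at most once per path.

    `fetch(cache_path)` is a hypothetical callable that downloads the image.
    """
    with _lock_for(cache_path):
        if os.path.exists(cache_path):      # another request already cached it
            return cache_path
        fetch(cache_path)                   # first caller pays the download cost
        return cache_path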
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 926.844830] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb0c403c-8602-48f4-964e-79a2b2ee4b98 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.852817] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 926.852817] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526a9ad7-e588-6a6d-5981-87f22b324732" [ 926.852817] env[62914]: _type = "Task" [ 926.852817] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.863459] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526a9ad7-e588-6a6d-5981-87f22b324732, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.959987] env[62914]: DEBUG oslo_concurrency.lockutils [None req-49323baf-fd18-4912-95b1-27a07fada1ac tempest-MigrationsAdminTest-16747298 tempest-MigrationsAdminTest-16747298-project-member] Lock "aede8da7-8bf2-4963-b08b-6e06007614a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.477s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.244602] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144101} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.244922] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.245802] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eca63c2-684f-4f93-a182-6e5ada5c6110 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.272557] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] aedc785f-619f-4b9f-850f-790f84e57577/aedc785f-619f-4b9f-850f-790f84e57577.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.272973] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14c05e82-1553-406d-8802-b1dc15cf6b9b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.295801] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 927.295801] env[62914]: value = "task-4832158" [ 927.295801] env[62914]: _type = "Task" [ 927.295801] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.308454] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832158, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.311609] env[62914]: DEBUG nova.compute.manager [req-4ad027e5-6ff6-443e-8ca3-bc863f4d58fb req-bf5e99f7-6ac8-4e9b-b205-beed86c1d7d9 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Received event network-vif-plugged-94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 927.311841] env[62914]: DEBUG oslo_concurrency.lockutils [req-4ad027e5-6ff6-443e-8ca3-bc863f4d58fb req-bf5e99f7-6ac8-4e9b-b205-beed86c1d7d9 service nova] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.312142] env[62914]: DEBUG oslo_concurrency.lockutils [req-4ad027e5-6ff6-443e-8ca3-bc863f4d58fb req-bf5e99f7-6ac8-4e9b-b205-beed86c1d7d9 service nova] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.312330] env[62914]: DEBUG oslo_concurrency.lockutils [req-4ad027e5-6ff6-443e-8ca3-bc863f4d58fb req-bf5e99f7-6ac8-4e9b-b205-beed86c1d7d9 service nova] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.312508] env[62914]: DEBUG nova.compute.manager [req-4ad027e5-6ff6-443e-8ca3-bc863f4d58fb req-bf5e99f7-6ac8-4e9b-b205-beed86c1d7d9 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] No waiting events found dispatching network-vif-plugged-94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 927.312675] env[62914]: WARNING nova.compute.manager [req-4ad027e5-6ff6-443e-8ca3-bc863f4d58fb req-bf5e99f7-6ac8-4e9b-b205-beed86c1d7d9 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Received unexpected event network-vif-plugged-94d0e4cd-493e-4e41-89dc-b0636889e9d9 for instance with vm_state building and task_state spawning. [ 927.364063] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526a9ad7-e588-6a6d-5981-87f22b324732, 'name': SearchDatastore_Task, 'duration_secs': 0.020477} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.364910] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ea242f5-eaf5-4662-a124-d538888d5f64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.375977] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 927.375977] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5261b8ac-89f7-13ec-95cd-f150f56deaf1" [ 927.375977] env[62914]: _type = "Task" [ 927.375977] env[62914]: } to complete. 
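The network-vif-plugged sequence ("Received event ...", "No waiting events found dispatching ...", "Received unexpected event ...") shows the external-event handshake: the compute side registers the event it expects, the Neutron notification pops it, and an event nobody registered for is logged as unexpected. A reduced sketch of such a registry built on threading.Event; the real nova.compute.manager.InstanceEvents differs in detail and runs under eventlet.

import threading

class EventRegistry:
    def __init__(self):
        self._events = {}          # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, name):
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, name)] = ev
        return ev

    def pop_and_set(self, instance_uuid, name):
        """Called when an external event arrives; returns False if nobody waited."""
        with self._lock:
            ev = self._events.pop((instance_uuid, name), None)
        if ev is None:
            print(f"No waiting events found dispatching {name}")   # 'unexpected event'
            return False
        ev.set()
        return True

# waiter side:                                   # notifier side:
# ev = reg.prepare(uuid, 'network-vif-plugged-<port>')
# ...plug the VIF...                             # reg.pop_and_set(uuid, 'network-vif-plugged-<port>')
# ev.wait(timeout=300)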
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.383333] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5261b8ac-89f7-13ec-95cd-f150f56deaf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.394338] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52000a17-2898-eb81-5e27-d088cb164cf8/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 927.395796] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81891b17-4d45-425e-99bc-25328f3ceb7f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.403460] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52000a17-2898-eb81-5e27-d088cb164cf8/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 927.404028] env[62914]: ERROR oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52000a17-2898-eb81-5e27-d088cb164cf8/disk-0.vmdk due to incomplete transfer. [ 927.404468] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c3a43c29-0869-4015-aa2b-3722f4c68bb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.413657] env[62914]: DEBUG oslo_vmware.rw_handles [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52000a17-2898-eb81-5e27-d088cb164cf8/disk-0.vmdk. 
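The ERROR "Aborting lease ... due to incomplete transfer" is the cleanup branch of an NFC lease: when the read handle closes before the whole disk was streamed, the lease is aborted rather than completed. The shape of that cleanup, sketched with hypothetical lease.abort()/lease.complete() stand-ins for the HttpNfcLeaseAbort/HttpNfcLeaseComplete calls seen above:

import contextlib

@contextlib.contextmanager
def nfc_transfer(lease, expected_bytes):
    """Yield a progress dict; abort the lease unless the full size was read.

    `lease.abort()` / `lease.complete()` are illustrative stand-ins, not a real API.
    """
    progress = {"bytes": 0}
    try:
        yield progress
    finally:
        if progress["bytes"] < expected_bytes:
            lease.abort()      # "Aborting lease ... due to incomplete transfer"
        else:
            lease.complete()

# usage sketch:
# with nfc_transfer(lease, disk_size) as p:
#     for chunk in read_vmdk():
#         write(chunk)
#         p["bytes"] += len(chunk)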
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 927.414135] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Uploaded image ff487db2-eea6-4a26-b872-6a68c137710a to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 927.419332] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 927.421364] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a3bebd01-2fa8-4596-8d57-a1c78a50f4f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.432672] env[62914]: DEBUG nova.network.neutron [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Successfully updated port: 94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.444028] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 927.444028] env[62914]: value = "task-4832159" [ 927.444028] env[62914]: _type = "Task" [ 927.444028] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.456499] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832159, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.458368] env[62914]: DEBUG nova.scheduler.client.report [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 927.482163] env[62914]: DEBUG nova.scheduler.client.report [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 927.482620] env[62914]: DEBUG nova.compute.provider_tree [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.495380] env[62914]: DEBUG nova.scheduler.client.report [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 927.515427] env[62914]: DEBUG nova.scheduler.client.report [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 927.806925] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832158, 'name': ReconfigVM_Task} progress is 14%. 
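The inventory payload reported above for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 is a plain dict keyed by resource class. As a quick sanity check of the numbers it carries, usable capacity per class is commonly derived as (total - reserved) * allocation_ratio; a short sketch using the exact values from the record:

# Values copied from the ProviderTree inventory record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Usable capacity per class: (total - reserved) * allocation_ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200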
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.887345] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5261b8ac-89f7-13ec-95cd-f150f56deaf1, 'name': SearchDatastore_Task, 'duration_secs': 0.037888} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.889886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.889886] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 927.890347] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0f30e9d-8b25-49e5-96ca-66b16a60a13c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.898363] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 927.898363] env[62914]: value = "task-4832160" [ 927.898363] env[62914]: _type = "Task" [ 927.898363] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.914285] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832160, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.928022] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.928022] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.936238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.936238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.936238] env[62914]: DEBUG nova.network.neutron [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 927.952872] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832159, 'name': Destroy_Task} progress is 100%. 
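The Task records above ("progress is 0%" through "completed successfully") are produced by a poll-and-wait loop around vCenter task objects. Below is a generic, self-contained sketch of that pattern, not the oslo.vmware wait_for_task implementation; get_task_info is a hypothetical callable returning (state, progress).

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    # Poll the task until it reports success, error, or the timeout expires.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = get_task_info()
        print(f"progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

# Tiny demo: a fake task that reports 0%, 89%, then success.
states = iter([("running", 0), ("running", 89), ("success", 100)])
wait_for_task(lambda: next(states), poll_interval=0.01)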
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.003955] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8f1ba9-32c8-4141-834f-d7ee671b80b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.016786] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afeb2e3f-b822-47ea-a00d-22f71c5359ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.055251] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6433e791-c463-4732-9bf3-8c5f0a872ac9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.063208] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8d802a-a7f1-418e-9277-10f77095ccb6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.077756] env[62914]: DEBUG nova.compute.provider_tree [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.134036] env[62914]: DEBUG nova.objects.instance [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lazy-loading 'flavor' on Instance uuid cca4bbf9-8864-4805-b95e-954e6b570eae {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.307797] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832158, 'name': ReconfigVM_Task, 'duration_secs': 0.719921} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.308263] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Reconfigured VM instance instance-0000004f to attach disk [datastore1] aedc785f-619f-4b9f-850f-790f84e57577/aedc785f-619f-4b9f-850f-790f84e57577.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.309625] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82221e4b-909b-4317-a62e-331ff94be76e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.320847] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 928.320847] env[62914]: value = "task-4832161" [ 928.320847] env[62914]: _type = "Task" [ 928.320847] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.330940] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832161, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.408447] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832160, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.438099] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.438293] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 928.456208] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832159, 'name': Destroy_Task, 'duration_secs': 0.541481} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.456629] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Destroyed the VM [ 928.456960] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 928.457263] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-31b417df-6559-447f-9f66-50ef9446a6aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.470025] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 928.470025] env[62914]: value = "task-4832162" [ 928.470025] env[62914]: _type = "Task" [ 928.470025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.477556] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832162, 'name': RemoveSnapshot_Task} progress is 0%. 
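The "Running periodic task ComputeManager._heal_instance_info_cache" records above come from oslo.service's periodic task machinery. A minimal sketch of how a manager class registers such a task, assuming the standard oslo.service decorator API; the spacing value and the class/method names are illustrative, not Nova's:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class DemoManager(periodic_task.PeriodicTasks):
    """Registers one periodic task the way ComputeManager registers its own."""

    def __init__(self):
        super().__init__(CONF)

    # spacing/run_immediately are illustrative values, not Nova's configuration.
    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _heal_info_cache(self, context):
        print("refreshing cached instance network info")

# run_periodic_tasks() is what emits the "Running periodic task ..." debug lines.
DemoManager().run_periodic_tasks(context=None)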
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.517821] env[62914]: DEBUG nova.network.neutron [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.583617] env[62914]: DEBUG nova.scheduler.client.report [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.638168] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.638574] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.839421] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832161, 'name': Rename_Task, 'duration_secs': 0.291347} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.839817] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 928.840138] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1affe891-84cb-40c9-9a2d-d962141fc85d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.851663] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 928.851663] env[62914]: value = "task-4832163" [ 928.851663] env[62914]: _type = "Task" [ 928.851663] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.861438] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.862710] env[62914]: DEBUG nova.network.neutron [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.911147] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832160, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.979311] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832162, 'name': RemoveSnapshot_Task} progress is 0%. 
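The network_info blob cached above for instance ec73b924-e132-44b6-bc67-2b3c08592f03 is a list of VIF dicts; the fields consumed later (MAC address, fixed IP, devname) sit at fixed paths inside it. A short sketch pulling them out of a copy of that structure trimmed to the relevant keys:

# Trimmed copy of the VIF entry logged above for port 94d0e4cd-493e-4e41-89dc-b0636889e9d9.
network_info = [{
    "id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9",
    "address": "fa:16:3e:a9:9e:4b",
    "network": {
        "id": "291932be-19d4-43a6-92e1-e8716f572f77",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tap94d0e4cd-49",
    "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    # -> fa:16:3e:a9:9e:4b tap94d0e4cd-49 ['192.168.128.6']
    print(vif["address"], vif["devname"], fixed_ips)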
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.067825] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.068439] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.091157] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.092343] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 929.095148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.507s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.095451] env[62914]: DEBUG nova.objects.instance [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lazy-loading 'resources' on Instance uuid 7d8287f9-10be-4834-8b7a-1b764145d1c3 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.145210] env[62914]: DEBUG nova.network.neutron [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 929.270303] env[62914]: DEBUG nova.compute.manager [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 929.270538] env[62914]: DEBUG nova.compute.manager [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service 
nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing instance network info cache due to event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 929.270822] env[62914]: DEBUG oslo_concurrency.lockutils [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.345575] env[62914]: DEBUG nova.compute.manager [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Received event network-changed-94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 929.345953] env[62914]: DEBUG nova.compute.manager [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Refreshing instance network info cache due to event network-changed-94d0e4cd-493e-4e41-89dc-b0636889e9d9. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 929.346149] env[62914]: DEBUG oslo_concurrency.lockutils [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.363020] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832163, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.367990] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.367990] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Instance network_info: |[{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 929.367990] env[62914]: DEBUG oslo_concurrency.lockutils [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.367990] env[62914]: DEBUG nova.network.neutron [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Refreshing network info cache for port 94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 929.369172] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:9e:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94d0e4cd-493e-4e41-89dc-b0636889e9d9', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.378348] env[62914]: DEBUG oslo.service.loopingcall [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 
tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.381327] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 929.381982] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98ee4d06-1ff5-44b6-bfd2-684710d08519 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.405849] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.405849] env[62914]: value = "task-4832164" [ 929.405849] env[62914]: _type = "Task" [ 929.405849] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.413409] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832160, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.014948} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.414876] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 929.414876] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 929.415114] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9e4c95e-fecd-4e97-9e8d-208f50992a12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.420516] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832164, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.426796] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 929.426796] env[62914]: value = "task-4832165" [ 929.426796] env[62914]: _type = "Task" [ 929.426796] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.440376] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832165, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.483163] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832162, 'name': RemoveSnapshot_Task} progress is 68%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.512443] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.573737] env[62914]: DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 929.599052] env[62914]: DEBUG nova.compute.utils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.604310] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 929.604310] env[62914]: DEBUG nova.network.neutron [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.667614] env[62914]: DEBUG nova.policy [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5e4285d82cd420bb797f2fadf6b034d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ffdaa966ecb4979845fda7778c7fb45', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 929.695598] env[62914]: DEBUG nova.network.neutron [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updated VIF entry in instance network info cache for port 94d0e4cd-493e-4e41-89dc-b0636889e9d9. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 929.696119] env[62914]: DEBUG nova.network.neutron [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.865274] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832163, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.921629] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832164, 'name': CreateVM_Task, 'duration_secs': 0.375161} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.921842] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 929.922790] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.923027] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.923327] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 929.926828] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ff27fc7-4176-4df2-bc05-2ae28ee3f574 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.935885] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 929.935885] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5226d3ad-bb74-5f55-26e4-6c647efbb2ea" [ 929.935885] env[62914]: _type = "Task" [ 929.935885] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.940162] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16213} completed successfully. 
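The Acquiring/Acquired/Releasing lock records around the cached image VMDK above come from oslo.concurrency's lockutils, which serializes access to the image cache across concurrent builds. A minimal usage sketch; the lock name is taken from the records above, the bodies are placeholders:

from oslo_concurrency import lockutils

CACHE_LOCK = "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c"

# Context-manager form: produces acquire/release debug records like those above.
with lockutils.lock(CACHE_LOCK):
    pass  # e.g. check whether the cached VMDK already exists before copying it

# Decorator form: serializes every call to the function on the same lock name.
@lockutils.synchronized(CACHE_LOCK)
def fetch_image_if_missing():
    pass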
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.946108] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 929.949783] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdfaf0f-b629-40eb-8a44-eaf4bf91d6d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.961245] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5226d3ad-bb74-5f55-26e4-6c647efbb2ea, 'name': SearchDatastore_Task, 'duration_secs': 0.011015} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.968518] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.968882] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.969052] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.969208] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.969500] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.979611] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Reconfiguring VM instance instance-0000004e to 
attach disk [datastore1] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.983400] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64b6fea3-dbdb-4dff-8db8-d89e7621fc24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.987608] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cde1443e-e28e-43fd-ba98-cdd629e244c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.004587] env[62914]: DEBUG nova.network.neutron [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.015016] env[62914]: DEBUG oslo_vmware.api [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832162, 'name': RemoveSnapshot_Task, 'duration_secs': 1.156163} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.019687] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 930.019963] env[62914]: INFO nova.compute.manager [None req-1b76492f-b0fe-4073-8fb8-63054bf0ed74 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Took 22.72 seconds to snapshot the instance on the hypervisor. [ 930.022400] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 930.022400] env[62914]: value = "task-4832166" [ 930.022400] env[62914]: _type = "Task" [ 930.022400] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.022655] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.022793] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 930.024566] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b1c42c6-2897-46c1-8687-ab7e01b12b24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.034893] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 930.034893] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523c652c-d9d9-1abf-7b28-c93767da1dc2" [ 930.034893] env[62914]: _type = "Task" [ 930.034893] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.043016] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.055663] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523c652c-d9d9-1abf-7b28-c93767da1dc2, 'name': SearchDatastore_Task, 'duration_secs': 0.01176} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.055895] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0db6463-b03b-4fb2-bfb0-8644036ba99a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.067133] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 930.067133] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb30a2-3b61-ed71-cb01-50c96dcef875" [ 930.067133] env[62914]: _type = "Task" [ 930.067133] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.078211] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb30a2-3b61-ed71-cb01-50c96dcef875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.094690] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.104721] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 930.183050] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f917036f-2b9f-47d9-95f8-4751719b1eaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.189273] env[62914]: DEBUG nova.network.neutron [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Successfully created port: 8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 930.194318] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbfe99a-eb23-4849-bf75-ec216329e155 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.198192] env[62914]: DEBUG oslo_concurrency.lockutils [req-a2f006b4-7f42-44ec-b1f5-2bb5acd74a83 req-ab982bfc-6757-44ec-9941-4013b790dd73 service nova] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.229700] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a7c9d8-0f1b-4a53-9713-64fe9d440286 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.239951] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ec43c9-c799-4f43-86e4-3c2adf9eac84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.255409] env[62914]: DEBUG nova.compute.provider_tree [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.366363] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832163, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.508208] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.508571] env[62914]: DEBUG nova.compute.manager [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Inject network info {{(pid=62914) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7264}} [ 930.508824] env[62914]: DEBUG nova.compute.manager [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] network_info to inject: |[{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 930.517018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Reconfiguring VM instance to set the machine id {{(pid=62914) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 930.517462] env[62914]: DEBUG oslo_concurrency.lockutils [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.517704] env[62914]: DEBUG nova.network.neutron [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing network info cache for port 
3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 930.521749] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bc0850b-2318-4a22-a229-8efab41f32a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.547677] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.549399] env[62914]: DEBUG oslo_vmware.api [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 930.549399] env[62914]: value = "task-4832167" [ 930.549399] env[62914]: _type = "Task" [ 930.549399] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.559270] env[62914]: DEBUG oslo_vmware.api [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832167, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.583304] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb30a2-3b61-ed71-cb01-50c96dcef875, 'name': SearchDatastore_Task, 'duration_secs': 0.010687} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.583967] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.584269] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 930.584548] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3554f0f-a397-42ba-9f0f-87e32ce85f07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.591873] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 930.591873] env[62914]: value = "task-4832168" [ 930.591873] env[62914]: _type = "Task" [ 930.591873] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.600864] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.759761] env[62914]: DEBUG nova.scheduler.client.report [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.868842] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832163, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.911987] env[62914]: DEBUG nova.objects.instance [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lazy-loading 'flavor' on Instance uuid cca4bbf9-8864-4805-b95e-954e6b570eae {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.052208] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.063231] env[62914]: DEBUG oslo_vmware.api [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832167, 'name': ReconfigVM_Task, 'duration_secs': 0.355802} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.063386] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-39f5de54-b5e5-4e60-97f9-38f65a523f2c tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Reconfigured VM instance to set the machine id {{(pid=62914) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 931.108774] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832168, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.113742] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 931.153840] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.154746] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.154984] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.155233] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.155396] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.155551] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.155770] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.155937] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.156125] env[62914]: DEBUG nova.virt.hardware [None 
req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.156295] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.156473] env[62914]: DEBUG nova.virt.hardware [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.157381] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec61705-1e2c-452d-b149-7d9338fc1645 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.169467] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9449c019-dad3-44cf-aca4-4d5c3a1a1e94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.266847] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.172s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.269407] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.233s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.271036] env[62914]: INFO nova.compute.claims [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.313058] env[62914]: INFO nova.scheduler.client.report [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Deleted allocations for instance 7d8287f9-10be-4834-8b7a-1b764145d1c3 [ 931.365993] env[62914]: DEBUG oslo_vmware.api [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832163, 'name': PowerOnVM_Task, 'duration_secs': 2.282056} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.366409] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 931.366584] env[62914]: INFO nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Took 12.46 seconds to spawn the instance on the hypervisor. [ 931.366817] env[62914]: DEBUG nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 931.368304] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fb1b3d-e99a-46db-8853-bb1f6ad46391 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.419485] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.550546] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832166, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.592138] env[62914]: DEBUG nova.network.neutron [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updated VIF entry in instance network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 931.592736] env[62914]: DEBUG nova.network.neutron [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.607424] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547949} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.608467] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 931.609049] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.609049] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d53481e8-7f8b-4039-960f-cfcc199a67e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.618042] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 931.618042] env[62914]: value = "task-4832169" [ 931.618042] env[62914]: _type = "Task" [ 931.618042] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.631587] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832169, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.821304] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ee815cab-3528-4b38-8bde-dab0673e4663 tempest-FloatingIPsAssociationTestJSON-150665211 tempest-FloatingIPsAssociationTestJSON-150665211-project-member] Lock "7d8287f9-10be-4834-8b7a-1b764145d1c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.491s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.892023] env[62914]: INFO nova.compute.manager [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Took 46.85 seconds to build instance. [ 932.051717] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832166, 'name': ReconfigVM_Task, 'duration_secs': 1.546017} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.052077] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 769c3873-7480-47de-894b-40dbf3f2f7f0/769c3873-7480-47de-894b-40dbf3f2f7f0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.052749] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36a1859a-eb96-4c10-b498-cc1e30517123 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.062179] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 932.062179] env[62914]: value = "task-4832170" [ 932.062179] env[62914]: _type = "Task" [ 932.062179] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.073938] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832170, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.096036] env[62914]: DEBUG oslo_concurrency.lockutils [req-460e494b-c527-4100-9624-0da5a29be641 req-29be211c-8079-41df-ba79-52d6e207119d service nova] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.096549] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.129547] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832169, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071679} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.129547] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.130855] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9344ca-99c9-4876-bc0d-c74ad0dda925 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.160723] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.162586] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cbf8da4-e185-4413-ad73-6bbf91217b19 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.185025] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 932.185025] env[62914]: value = "task-4832171" [ 932.185025] env[62914]: _type = "Task" [ 932.185025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.193473] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832171, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.340426] env[62914]: DEBUG nova.network.neutron [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Successfully updated port: 8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.390293] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c457e963-77f1-4676-9918-0ba575e44d47 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "aedc785f-619f-4b9f-850f-790f84e57577" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.832s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.537296] env[62914]: DEBUG nova.network.neutron [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 932.578889] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832170, 'name': Rename_Task, 'duration_secs': 0.2462} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.580035] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 932.580374] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1b06eaf-9646-471b-9e4a-4fd518c3148e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.588775] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 932.588775] env[62914]: value = "task-4832172" [ 932.588775] env[62914]: _type = "Task" [ 932.588775] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.600936] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.696911] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832171, 'name': ReconfigVM_Task, 'duration_secs': 0.295818} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.697259] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.697954] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1cfdc4c-21a0-4efa-9ce3-5b54d5e8d382 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.706674] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 932.706674] env[62914]: value = "task-4832173" [ 932.706674] env[62914]: _type = "Task" [ 932.706674] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.716769] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832173, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.720935] env[62914]: DEBUG nova.compute.manager [req-6c09d9d7-666c-4e9b-9576-7c237be16f07 req-354fc1bd-7349-4a39-8b3a-da6cdc71c698 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Received event network-vif-plugged-8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 932.722130] env[62914]: DEBUG oslo_concurrency.lockutils [req-6c09d9d7-666c-4e9b-9576-7c237be16f07 req-354fc1bd-7349-4a39-8b3a-da6cdc71c698 service nova] Acquiring lock "47aa2783-367e-4445-8261-7c75eb7561ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.722130] env[62914]: DEBUG oslo_concurrency.lockutils [req-6c09d9d7-666c-4e9b-9576-7c237be16f07 req-354fc1bd-7349-4a39-8b3a-da6cdc71c698 service nova] Lock "47aa2783-367e-4445-8261-7c75eb7561ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.722130] env[62914]: DEBUG oslo_concurrency.lockutils [req-6c09d9d7-666c-4e9b-9576-7c237be16f07 req-354fc1bd-7349-4a39-8b3a-da6cdc71c698 service nova] Lock "47aa2783-367e-4445-8261-7c75eb7561ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.722130] env[62914]: DEBUG nova.compute.manager [req-6c09d9d7-666c-4e9b-9576-7c237be16f07 req-354fc1bd-7349-4a39-8b3a-da6cdc71c698 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] No waiting events found dispatching 
network-vif-plugged-8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 932.722755] env[62914]: WARNING nova.compute.manager [req-6c09d9d7-666c-4e9b-9576-7c237be16f07 req-354fc1bd-7349-4a39-8b3a-da6cdc71c698 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Received unexpected event network-vif-plugged-8074d5d3-e32f-45b2-94c7-d1c4105fb78d for instance with vm_state building and task_state spawning. [ 932.757805] env[62914]: DEBUG nova.compute.manager [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 932.757805] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7426d2-6907-47f9-a868-433168224def {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.820064] env[62914]: DEBUG nova.compute.manager [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Received event network-changed-8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 932.820307] env[62914]: DEBUG nova.compute.manager [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Refreshing instance network info cache due to event network-changed-8074d5d3-e32f-45b2-94c7-d1c4105fb78d. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 932.820557] env[62914]: DEBUG oslo_concurrency.lockutils [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] Acquiring lock "refresh_cache-47aa2783-367e-4445-8261-7c75eb7561ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.820787] env[62914]: DEBUG oslo_concurrency.lockutils [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] Acquired lock "refresh_cache-47aa2783-367e-4445-8261-7c75eb7561ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.822040] env[62914]: DEBUG nova.network.neutron [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Refreshing network info cache for port 8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 932.846692] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "refresh_cache-47aa2783-367e-4445-8261-7c75eb7561ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.893874] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aadd36a-b5b4-404e-b3f7-d49fefcb4263 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.903649] env[62914]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4459069-cddc-45a1-93b8-22bc6d4aa3fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.940367] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f65d7b5-d93d-4d95-a374-e2fc2ebdc3c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.952211] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e56db7-378c-4767-8c80-8ce1bf3cfd9e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.967730] env[62914]: DEBUG nova.compute.provider_tree [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.973528] env[62914]: DEBUG nova.compute.manager [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 932.973528] env[62914]: DEBUG nova.compute.manager [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing instance network info cache due to event network-changed-3dc57e52-6e86-4d59-bf3e-c46b60446825. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 932.973642] env[62914]: DEBUG oslo_concurrency.lockutils [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] Acquiring lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.999155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "b443050b-78ae-4f9d-81d4-508f5cf4a322" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.999401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "b443050b-78ae-4f9d-81d4-508f5cf4a322" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.101374] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832172, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.220024] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832173, 'name': Rename_Task, 'duration_secs': 0.143308} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.220637] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 933.220938] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e156c4e9-a9c0-464e-a796-a0e3efdf8dbb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.230410] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 933.230410] env[62914]: value = "task-4832174" [ 933.230410] env[62914]: _type = "Task" [ 933.230410] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.240683] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832174, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.278016] env[62914]: INFO nova.compute.manager [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] instance snapshotting [ 933.280687] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd62b809-e2ee-4ef7-90d4-6e6ef4116cfe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.304767] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd3e45c-5f0a-4d31-8a51-0224e8a95313 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.382542] env[62914]: DEBUG nova.network.neutron [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 933.475848] env[62914]: DEBUG nova.scheduler.client.report [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.482029] env[62914]: DEBUG nova.network.neutron [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.501955] env[62914]: DEBUG nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 933.603441] env[62914]: DEBUG oslo_vmware.api [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832172, 'name': PowerOnVM_Task, 'duration_secs': 0.96762} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.603827] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 933.603965] env[62914]: DEBUG nova.compute.manager [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 933.604968] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1d0b0c-75c8-4e35-bba5-b5db1d332143 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.608709] env[62914]: DEBUG nova.network.neutron [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.745487] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832174, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.821875] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 933.822350] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2ce329b4-0829-4536-a183-e9a75c44c4b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.833025] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 933.833025] env[62914]: value = "task-4832175" [ 933.833025] env[62914]: _type = "Task" [ 933.833025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.847802] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832175, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.982267] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.982862] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 933.986388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.495s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.987561] env[62914]: INFO nova.compute.claims [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.995436] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Didn't find any instances for network info cache update. 
{{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10090}} [ 933.995868] env[62914]: DEBUG oslo_concurrency.lockutils [req-51b70374-ad41-4922-b152-b2ea8b8c5f3f req-03e144e8-8067-4c60-913a-2e24bdace1ac service nova] Releasing lock "refresh_cache-47aa2783-367e-4445-8261-7c75eb7561ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.996604] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.996827] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "refresh_cache-47aa2783-367e-4445-8261-7c75eb7561ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.996984] env[62914]: DEBUG nova.network.neutron [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 933.998806] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.999134] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.999326] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.999478] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.999624] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.999751] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 933.999895] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.049531] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.112779] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.112779] env[62914]: DEBUG nova.compute.manager [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Inject network info {{(pid=62914) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7264}} [ 934.113061] env[62914]: DEBUG nova.compute.manager [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] network_info to inject: |[{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 934.120799] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Reconfiguring VM instance to set the machine id {{(pid=62914) _set_machine_id 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 934.121484] env[62914]: DEBUG oslo_concurrency.lockutils [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] Acquired lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.121563] env[62914]: DEBUG nova.network.neutron [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Refreshing network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 934.126596] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ad8bd5e-4121-44fb-ad28-fa61ee361b50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.151208] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.157963] env[62914]: DEBUG oslo_vmware.api [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 934.157963] env[62914]: value = "task-4832176" [ 934.157963] env[62914]: _type = "Task" [ 934.157963] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.170154] env[62914]: DEBUG oslo_vmware.api [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832176, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.242911] env[62914]: DEBUG oslo_vmware.api [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832174, 'name': PowerOnVM_Task, 'duration_secs': 0.533904} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.243315] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 934.243585] env[62914]: INFO nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Took 7.85 seconds to spawn the instance on the hypervisor. 
[ 934.243796] env[62914]: DEBUG nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 934.244711] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ae6f17-f450-4b1f-9cb2-03cc3868a954 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.311632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "cca4bbf9-8864-4805-b95e-954e6b570eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.311936] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.313039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "cca4bbf9-8864-4805-b95e-954e6b570eae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.313297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.313502] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.317183] env[62914]: INFO nova.compute.manager [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Terminating instance [ 934.321043] env[62914]: DEBUG nova.compute.manager [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Start 
destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 934.321287] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 934.323036] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1addd847-8ea4-4c79-8863-3ef3d3439f5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.333830] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 934.341668] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-322bb43d-8325-45bf-b582-9b8bf3c9c730 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.352600] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832175, 'name': CreateSnapshot_Task, 'duration_secs': 0.502524} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.354395] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 934.354666] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 934.354666] env[62914]: value = "task-4832177" [ 934.354666] env[62914]: _type = "Task" [ 934.354666] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.355753] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd645055-7ccd-4396-bf69-a2f8e7917f67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.367320] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832177, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.462838] env[62914]: DEBUG nova.network.neutron [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updated VIF entry in instance network info cache for port 3dc57e52-6e86-4d59-bf3e-c46b60446825. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 934.462838] env[62914]: DEBUG nova.network.neutron [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [{"id": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "address": "fa:16:3e:40:22:1e", "network": {"id": "38ee6295-fc54-41ed-877f-896284874e58", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-441485595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23c2dfbda62544b8bdba7832e31a0f27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc57e52-6e", "ovs_interfaceid": "3dc57e52-6e86-4d59-bf3e-c46b60446825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.501022] env[62914]: DEBUG nova.compute.utils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.503848] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 934.504195] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 934.509498] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.550272] env[62914]: DEBUG nova.policy [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3757ee859d1a4cebbcc504c8c92f6489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1873cee9895d48cb97914fd7ca8392a0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 934.591672] env[62914]: DEBUG nova.network.neutron [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 934.673559] env[62914]: DEBUG oslo_vmware.api [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832176, 'name': ReconfigVM_Task, 'duration_secs': 0.221605} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.673943] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a034b73e-1c65-42e1-9f07-eb8f6cda5e07 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Reconfigured VM instance to set the machine id {{(pid=62914) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 934.767508] env[62914]: INFO nova.compute.manager [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Took 40.68 seconds to build instance. [ 934.870759] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832177, 'name': PowerOffVM_Task, 'duration_secs': 0.483093} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.871202] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 934.871745] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 934.871745] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-641be1f7-5809-4151-bc6c-bc8e849f1504 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.881356] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 934.883023] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8286f2ae-9f78-4d31-a680-5b4d8dc8219d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.892324] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 934.892324] env[62914]: value = "task-4832179" [ 934.892324] env[62914]: _type = "Task" [ 934.892324] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.901591] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832179, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.916616] env[62914]: DEBUG nova.network.neutron [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Updating instance_info_cache with network_info: [{"id": "8074d5d3-e32f-45b2-94c7-d1c4105fb78d", "address": "fa:16:3e:b8:0a:92", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8074d5d3-e3", "ovs_interfaceid": "8074d5d3-e32f-45b2-94c7-d1c4105fb78d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.959016] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 934.959603] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 934.959603] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Deleting the datastore file [datastore1] cca4bbf9-8864-4805-b95e-954e6b570eae {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.959810] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7813c448-0505-4ecd-abfc-2fd0e2326974 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.965252] env[62914]: DEBUG oslo_concurrency.lockutils [req-0ad925d7-9f8a-4897-aa36-d67480bd69c6 req-fc70eeaf-96b5-4a29-8481-8a668b60e83e service nova] Releasing lock "refresh_cache-cca4bbf9-8864-4805-b95e-954e6b570eae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.969637] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 
tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for the task: (returnval){ [ 934.969637] env[62914]: value = "task-4832180" [ 934.969637] env[62914]: _type = "Task" [ 934.969637] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.979134] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.008485] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 935.031879] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Successfully created port: 576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.270473] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2c6be884-d7f6-42a2-94db-11482692f6a0 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.194s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.345521] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "769c3873-7480-47de-894b-40dbf3f2f7f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.345811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "769c3873-7480-47de-894b-40dbf3f2f7f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.346173] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "769c3873-7480-47de-894b-40dbf3f2f7f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.347201] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 
tempest-ServerShowV247Test-1836252768-project-member] Lock "769c3873-7480-47de-894b-40dbf3f2f7f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.347201] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "769c3873-7480-47de-894b-40dbf3f2f7f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.350819] env[62914]: INFO nova.compute.manager [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Terminating instance [ 935.353796] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "refresh_cache-769c3873-7480-47de-894b-40dbf3f2f7f0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.353998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "refresh_cache-769c3873-7480-47de-894b-40dbf3f2f7f0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.354344] env[62914]: DEBUG nova.network.neutron [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 935.413432] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832179, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.419797] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "refresh_cache-47aa2783-367e-4445-8261-7c75eb7561ab" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.419797] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Instance network_info: |[{"id": "8074d5d3-e32f-45b2-94c7-d1c4105fb78d", "address": "fa:16:3e:b8:0a:92", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8074d5d3-e3", "ovs_interfaceid": "8074d5d3-e32f-45b2-94c7-d1c4105fb78d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 935.420045] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:0a:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '456bd8a2-0fb6-4b17-9d25-08e7995c5184', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8074d5d3-e32f-45b2-94c7-d1c4105fb78d', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.429210] env[62914]: DEBUG oslo.service.loopingcall [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.432354] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 935.432794] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cde1031-0fac-4a37-9879-51f182b2bb07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.454243] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.454243] env[62914]: value = "task-4832181" [ 935.454243] env[62914]: _type = "Task" [ 935.454243] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.462840] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832181, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.482799] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.525270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.525723] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "fa33e1a5-677a-489c-8c89-a33066b18103" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.542746] env[62914]: DEBUG nova.compute.manager [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 935.542746] env[62914]: DEBUG nova.compute.manager [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing instance network info cache due to event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 935.543758] env[62914]: DEBUG oslo_concurrency.lockutils [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.543758] env[62914]: DEBUG oslo_concurrency.lockutils [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.543758] env[62914]: DEBUG nova.network.neutron [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 935.557677] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6be0eb-6a2b-4851-b54c-b9cf8caea230 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.566435] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4ed6b2-546b-4167-8b04-6fe4a29120a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.601730] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3402b4a8-e496-434e-a586-1018457ae520 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.610984] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ff0041-4571-49c9-84a6-7343b1d2ea0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.635510] env[62914]: DEBUG nova.compute.provider_tree [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.902992] env[62914]: DEBUG nova.network.neutron [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 935.910585] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832179, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.968725] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832181, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.980585] env[62914]: DEBUG oslo_vmware.api [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Task: {'id': task-4832180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.596376} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.984018] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.984018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 935.984018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 935.984018] env[62914]: INFO nova.compute.manager [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Took 1.66 seconds to destroy the instance on the hypervisor. [ 935.984018] env[62914]: DEBUG oslo.service.loopingcall [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.984018] env[62914]: DEBUG nova.compute.manager [-] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 935.984018] env[62914]: DEBUG nova.network.neutron [-] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 936.032876] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 936.036307] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 936.081901] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.082253] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.083996] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.083996] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.083996] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.083996] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.083996] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.083996] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.084504] env[62914]: DEBUG nova.virt.hardware 
[None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.084756] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.085007] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.085982] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b91c1d-9d61-42b0-a3a6-acfec5ecca5a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.096397] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82601c5-a993-48e5-be75-10a030dda9b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.116585] env[62914]: DEBUG nova.network.neutron [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.139628] env[62914]: DEBUG nova.scheduler.client.report [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.409265] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832179, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.476381] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832181, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.573692] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.619355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "refresh_cache-769c3873-7480-47de-894b-40dbf3f2f7f0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.620218] env[62914]: DEBUG nova.compute.manager [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 936.620701] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 936.622017] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac98e04-99e3-4423-a4dc-8148e57a6b27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.632629] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 936.633357] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c553b2a-3a04-40ea-8a83-e7ec46bb11d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.643237] env[62914]: DEBUG oslo_vmware.api [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 936.643237] env[62914]: value = "task-4832182" [ 936.643237] env[62914]: _type = "Task" [ 936.643237] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.646530] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.647277] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 936.657060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 31.869s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.667240] env[62914]: DEBUG oslo_vmware.api [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832182, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.903758] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Successfully updated port: 576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.908091] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832179, 'name': CloneVM_Task, 'duration_secs': 1.765073} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.908834] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Created linked-clone VM from snapshot [ 936.909596] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10728bb2-0def-4a8a-9f59-d29d37622a3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.918607] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Uploading image 31946e2b-796c-4aa4-b2c0-a1d66b1e72e7 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 936.948557] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 936.948557] env[62914]: value = "vm-941998" [ 936.948557] env[62914]: _type = "VirtualMachine" [ 936.948557] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 936.949409] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e3be9812-5d4a-4bdc-ba54-574c5cd5527a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.958913] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lease: (returnval){ [ 936.958913] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bbf07-7fa6-57a0-6b1d-f972ed4c8ae3" [ 936.958913] env[62914]: _type = "HttpNfcLease" [ 936.958913] env[62914]: } obtained for exporting VM: (result){ [ 936.958913] env[62914]: value = "vm-941998" [ 936.958913] env[62914]: _type = "VirtualMachine" [ 936.958913] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 936.959338] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the lease: (returnval){ [ 936.959338] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bbf07-7fa6-57a0-6b1d-f972ed4c8ae3" [ 936.959338] env[62914]: _type = "HttpNfcLease" [ 936.959338] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 936.971101] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 936.971101] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bbf07-7fa6-57a0-6b1d-f972ed4c8ae3" [ 936.971101] env[62914]: _type = "HttpNfcLease" [ 936.971101] env[62914]: } is ready. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 936.974531] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 936.974531] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522bbf07-7fa6-57a0-6b1d-f972ed4c8ae3" [ 936.974531] env[62914]: _type = "HttpNfcLease" [ 936.974531] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 936.975127] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832181, 'name': CreateVM_Task, 'duration_secs': 1.492493} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.975677] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3072b0-e083-48d1-87a1-d96731bd5016 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.978237] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 936.979052] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.979201] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.979597] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 936.980292] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0d1e517-33b8-4eba-a5e0-fa41786470e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.986243] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c276c-a8e8-c6bf-6cdc-565ef3bdf5e1/disk-0.vmdk from lease info. 
{{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 936.986541] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c276c-a8e8-c6bf-6cdc-565ef3bdf5e1/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 936.989128] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 936.989128] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dd7aad-51c7-f63c-c7b7-649434304abf" [ 936.989128] env[62914]: _type = "Task" [ 936.989128] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.049989] env[62914]: DEBUG nova.network.neutron [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updated VIF entry in instance network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 937.050448] env[62914]: DEBUG nova.network.neutron [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.055097] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dd7aad-51c7-f63c-c7b7-649434304abf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.092822] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f6019ffd-8b0d-427d-8287-85aeb3b2160c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.154850] env[62914]: DEBUG oslo_vmware.api [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832182, 'name': PowerOffVM_Task, 'duration_secs': 0.461145} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.155168] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 937.155344] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 937.155615] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-018d0620-0a1c-48c7-a963-71824bb9d93e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.158149] env[62914]: DEBUG nova.compute.utils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.159618] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 937.163144] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 937.217680] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 937.217911] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 937.218128] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleting the datastore file [datastore1] 769c3873-7480-47de-894b-40dbf3f2f7f0 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.218424] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5373c93c-a0fc-4d3f-a05b-ce12f472ffe3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.229113] env[62914]: DEBUG oslo_vmware.api [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 937.229113] env[62914]: value = "task-4832185" [ 937.229113] env[62914]: _type = "Task" [ 937.229113] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.238262] env[62914]: DEBUG oslo_vmware.api [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.286870] env[62914]: DEBUG nova.network.neutron [-] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.296923] env[62914]: DEBUG nova.policy [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3757ee859d1a4cebbcc504c8c92f6489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1873cee9895d48cb97914fd7ca8392a0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 937.409009] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "refresh_cache-13f2a615-aa95-411d-92f8-9ff1b6eba420" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.409187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "refresh_cache-13f2a615-aa95-411d-92f8-9ff1b6eba420" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.409356] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 937.501297] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52dd7aad-51c7-f63c-c7b7-649434304abf, 'name': SearchDatastore_Task, 'duration_secs': 0.009835} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.505080] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.505635] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.505970] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.506204] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.506493] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.507183] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e833615-0646-41fe-b0a0-0cc8d2cdcf60 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.518216] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.518815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 937.519903] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-187ca525-ee72-4941-8d53-b8660986101f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.531839] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 937.531839] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ad3922-81df-5a61-7392-f2695ef6b4eb" [ 937.531839] env[62914]: _type = "Task" [ 937.531839] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.541615] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ad3922-81df-5a61-7392-f2695ef6b4eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.558229] env[62914]: DEBUG oslo_concurrency.lockutils [req-c4d9d8d7-9cb1-4e06-8dcb-ecfad8c69a48 req-f2bd53ab-a148-4b08-9ad7-83c09c644997 service nova] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.663891] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 937.740303] env[62914]: DEBUG oslo_vmware.api [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283888} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.742071] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.742071] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 937.742071] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 937.742071] env[62914]: INFO nova.compute.manager [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 937.742071] env[62914]: DEBUG oslo.service.loopingcall [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.742071] env[62914]: DEBUG nova.compute.manager [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 937.742071] env[62914]: DEBUG nova.network.neutron [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 937.773475] env[62914]: DEBUG nova.network.neutron [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.790967] env[62914]: INFO nova.compute.manager [-] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Took 1.81 seconds to deallocate network for instance.
[ 937.825742] env[62914]: DEBUG nova.compute.manager [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Received event network-vif-plugged-576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 937.825742] env[62914]: DEBUG oslo_concurrency.lockutils [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] Acquiring lock "13f2a615-aa95-411d-92f8-9ff1b6eba420-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.825742] env[62914]: DEBUG oslo_concurrency.lockutils [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.825742] env[62914]: DEBUG oslo_concurrency.lockutils [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.825742] env[62914]: DEBUG nova.compute.manager [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] No waiting events found dispatching network-vif-plugged-576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 937.825742] env[62914]: WARNING nova.compute.manager [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Received unexpected event network-vif-plugged-576d897f-ee4d-4cda-bd9b-c835b51d2113 for instance with vm_state building and task_state spawning. [ 937.825742] env[62914]: DEBUG nova.compute.manager [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Received event network-vif-deleted-3dc57e52-6e86-4d59-bf3e-c46b60446825 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 937.825742] env[62914]: DEBUG nova.compute.manager [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Received event network-changed-576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 937.826043] env[62914]: DEBUG nova.compute.manager [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Refreshing instance network info cache due to event network-changed-576d897f-ee4d-4cda-bd9b-c835b51d2113.
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 937.826245] env[62914]: DEBUG oslo_concurrency.lockutils [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] Acquiring lock "refresh_cache-13f2a615-aa95-411d-92f8-9ff1b6eba420" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.841703] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cb3803-f492-468c-a81d-a904e007eefb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.858919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b3081c-ad97-4cae-8139-21e204beeadb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.921963] env[62914]: DEBUG nova.compute.manager [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Received event network-changed-94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 937.922331] env[62914]: DEBUG nova.compute.manager [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Refreshing instance network info cache due to event network-changed-94d0e4cd-493e-4e41-89dc-b0636889e9d9. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 937.923050] env[62914]: DEBUG oslo_concurrency.lockutils [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.923517] env[62914]: DEBUG oslo_concurrency.lockutils [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.923771] env[62914]: DEBUG nova.network.neutron [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Refreshing network info cache for port 94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 937.926320] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a798b4c-3379-479c-9688-ae9439c4b610 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.939958] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b156d68c-6caa-4b3d-afe2-bd4b678569f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.959812] env[62914]: DEBUG nova.compute.provider_tree [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: 
f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.046729] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ad3922-81df-5a61-7392-f2695ef6b4eb, 'name': SearchDatastore_Task, 'duration_secs': 0.01991} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.048088] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57d25e2d-bc48-40d5-8c51-ac71b438f677 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.054992] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 938.054992] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf1a68-f036-fca0-13ca-3c4bd19e101e" [ 938.054992] env[62914]: _type = "Task" [ 938.054992] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.064539] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf1a68-f036-fca0-13ca-3c4bd19e101e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.229897] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 938.277649] env[62914]: DEBUG nova.network.neutron [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.308269] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.329176] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Successfully created port: 27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.463518] env[62914]: DEBUG nova.scheduler.client.report [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.533600] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Updating instance_info_cache with network_info: [{"id": "576d897f-ee4d-4cda-bd9b-c835b51d2113", "address": "fa:16:3e:9f:92:39", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap576d897f-ee", "ovs_interfaceid": "576d897f-ee4d-4cda-bd9b-c835b51d2113", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.570720] env[62914]: DEBUG oslo_vmware.api [None 
req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf1a68-f036-fca0-13ca-3c4bd19e101e, 'name': SearchDatastore_Task, 'duration_secs': 0.016873} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.570720] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.570950] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 47aa2783-367e-4445-8261-7c75eb7561ab/47aa2783-367e-4445-8261-7c75eb7561ab.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 938.571395] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbb77ae1-483a-472c-852d-835d45bfcab8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.580460] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 938.580460] env[62914]: value = "task-4832186" [ 938.580460] env[62914]: _type = "Task" [ 938.580460] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.592340] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.684145] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 938.723022] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.723022] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.723022] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.723422] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 938.724694] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.724694] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.725124] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.725413] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.725619] env[62914]: DEBUG nova.virt.hardware [None 
req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.725867] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.726328] env[62914]: DEBUG nova.virt.hardware [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.730038] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b93dd1-01ca-4199-963b-869572957696 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.754472] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ff3ce8-3b27-44e6-bd2d-cc4b90258dd4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.782928] env[62914]: INFO nova.compute.manager [-] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Took 1.04 seconds to deallocate network for instance. [ 939.039635] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "refresh_cache-13f2a615-aa95-411d-92f8-9ff1b6eba420" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.039635] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Instance network_info: |[{"id": "576d897f-ee4d-4cda-bd9b-c835b51d2113", "address": "fa:16:3e:9f:92:39", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap576d897f-ee", "ovs_interfaceid": "576d897f-ee4d-4cda-bd9b-c835b51d2113", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 939.039635] env[62914]: DEBUG oslo_concurrency.lockutils [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] Acquired lock "refresh_cache-13f2a615-aa95-411d-92f8-9ff1b6eba420" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.039635] env[62914]: DEBUG nova.network.neutron [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Refreshing network info cache for port 576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 939.040425] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:92:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '576d897f-ee4d-4cda-bd9b-c835b51d2113', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.050577] env[62914]: DEBUG oslo.service.loopingcall [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.052208] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 939.052439] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88b5ab42-f2c0-4501-be5e-6ec47e28f788 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.072031] env[62914]: DEBUG nova.network.neutron [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updated VIF entry in instance network info cache for port 94d0e4cd-493e-4e41-89dc-b0636889e9d9. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 939.072450] env[62914]: DEBUG nova.network.neutron [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.076849] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.076849] env[62914]: value = "task-4832187" [ 939.076849] env[62914]: _type = "Task" [ 939.076849] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.093955] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832186, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.099314] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832187, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.298021] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.475493] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.818s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.481378] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.743s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.483197] env[62914]: INFO nova.compute.claims [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.582031] env[62914]: DEBUG oslo_concurrency.lockutils [req-c72fde44-a427-448b-97e7-8a2791cfac47 req-e94e78f9-eb53-4813-bdd3-f84de1a91c06 service nova] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.595419] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627939} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.599075] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 47aa2783-367e-4445-8261-7c75eb7561ab/47aa2783-367e-4445-8261-7c75eb7561ab.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 939.599379] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.599742] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832187, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.602601] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2bf35e8-6627-4258-9f8e-72e82f06dca5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.611916] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 939.611916] env[62914]: value = "task-4832188" [ 939.611916] env[62914]: _type = "Task" [ 939.611916] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.624263] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.931878] env[62914]: DEBUG nova.network.neutron [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Updated VIF entry in instance network info cache for port 576d897f-ee4d-4cda-bd9b-c835b51d2113. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 939.932356] env[62914]: DEBUG nova.network.neutron [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Updating instance_info_cache with network_info: [{"id": "576d897f-ee4d-4cda-bd9b-c835b51d2113", "address": "fa:16:3e:9f:92:39", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap576d897f-ee", "ovs_interfaceid": "576d897f-ee4d-4cda-bd9b-c835b51d2113", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.966446] env[62914]: DEBUG nova.compute.manager [req-52cbf07b-e367-42ee-93ae-fecbfeb256e8 req-712d0208-f8ca-4cd9-b6d3-6b36239eb063 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Received event network-vif-plugged-27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 939.966668] env[62914]: DEBUG oslo_concurrency.lockutils 
[req-52cbf07b-e367-42ee-93ae-fecbfeb256e8 req-712d0208-f8ca-4cd9-b6d3-6b36239eb063 service nova] Acquiring lock "3e6a3787-3e9c-411c-9c3c-305a62061b47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.966887] env[62914]: DEBUG oslo_concurrency.lockutils [req-52cbf07b-e367-42ee-93ae-fecbfeb256e8 req-712d0208-f8ca-4cd9-b6d3-6b36239eb063 service nova] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.967111] env[62914]: DEBUG oslo_concurrency.lockutils [req-52cbf07b-e367-42ee-93ae-fecbfeb256e8 req-712d0208-f8ca-4cd9-b6d3-6b36239eb063 service nova] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.967386] env[62914]: DEBUG nova.compute.manager [req-52cbf07b-e367-42ee-93ae-fecbfeb256e8 req-712d0208-f8ca-4cd9-b6d3-6b36239eb063 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] No waiting events found dispatching network-vif-plugged-27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 939.967594] env[62914]: WARNING nova.compute.manager [req-52cbf07b-e367-42ee-93ae-fecbfeb256e8 req-712d0208-f8ca-4cd9-b6d3-6b36239eb063 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Received unexpected event network-vif-plugged-27d5fb42-82df-4642-9ddc-5a34ed445dfc for instance with vm_state building and task_state spawning. [ 940.075544] env[62914]: INFO nova.scheduler.client.report [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocation for migration 8e667dab-3203-4a05-bc45-d9153ad9cc64 [ 940.095830] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832187, 'name': CreateVM_Task, 'duration_secs': 0.8096} completed successfully.
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.096365] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 940.098187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.098187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.098187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.099524] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-905d51ee-b1e2-4ed8-97fc-e1d694804dc7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.107211] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 940.107211] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52163b04-aa9f-ab6f-a840-ec93db0e91d3" [ 940.107211] env[62914]: _type = "Task" [ 940.107211] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.120044] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52163b04-aa9f-ab6f-a840-ec93db0e91d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.125623] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084743} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.125926] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.126757] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0934ae5-e8a8-411a-8270-e2a99668d858 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.152572] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 47aa2783-367e-4445-8261-7c75eb7561ab/47aa2783-367e-4445-8261-7c75eb7561ab.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.154664] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Successfully updated port: 27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.155572] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35a3123d-5c03-402f-b464-1f36bfefa224 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.171284] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "refresh_cache-3e6a3787-3e9c-411c-9c3c-305a62061b47" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.171429] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "refresh_cache-3e6a3787-3e9c-411c-9c3c-305a62061b47" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.171577] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 940.179579] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 940.179579] env[62914]: value = "task-4832189" [ 940.179579] env[62914]: _type = "Task" [ 940.179579] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.191762] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.435796] env[62914]: DEBUG oslo_concurrency.lockutils [req-8a086bdc-4b3a-4fc9-915e-e5ce147b44b1 req-0dc1d570-dd0d-486d-8400-53ad6dbaaaec service nova] Releasing lock "refresh_cache-13f2a615-aa95-411d-92f8-9ff1b6eba420" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.586018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-014dc0e9-bc98-48a8-a191-99ab7e7bec6b tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 39.361s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.619067] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52163b04-aa9f-ab6f-a840-ec93db0e91d3, 'name': SearchDatastore_Task, 'duration_secs': 0.034729} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.619542] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.619905] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.620253] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.620453] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.621291] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 
tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.621291] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-807a4c00-2218-4583-b3af-10590b5a03ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.636782] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.637010] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 940.637823] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1684fe3-e448-48e7-a68d-5cdaad4417ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.648407] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 940.648407] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d8c449-03f7-ade6-5bef-71c0edeafab0" [ 940.648407] env[62914]: _type = "Task" [ 940.648407] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.660319] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d8c449-03f7-ade6-5bef-71c0edeafab0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.691993] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832189, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.745181] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.052865] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8730fe-a1b1-4246-b2f1-8f73f4135954 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.062149] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7542eed-f2f7-4c97-a3d8-6c1bc487a3a3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.096231] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a079fa-49e7-4866-ae18-5be88dfcbb61 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.105389] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba5efec-515a-40de-9f1e-05598d520e07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.122552] env[62914]: DEBUG nova.compute.provider_tree [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.125210] env[62914]: DEBUG nova.network.neutron [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Updating instance_info_cache with network_info: [{"id": "27d5fb42-82df-4642-9ddc-5a34ed445dfc", "address": "fa:16:3e:6b:9b:06", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27d5fb42-82", "ovs_interfaceid": "27d5fb42-82df-4642-9ddc-5a34ed445dfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.160395] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d8c449-03f7-ade6-5bef-71c0edeafab0, 'name': SearchDatastore_Task, 'duration_secs': 0.027746} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.160841] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd47d5bd-82fb-4443-9b08-70a78321cc0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.168106] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 941.168106] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ccb0f-5aba-3965-dced-062e50ec37bd" [ 941.168106] env[62914]: _type = "Task" [ 941.168106] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.182232] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ccb0f-5aba-3965-dced-062e50ec37bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.192882] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832189, 'name': ReconfigVM_Task, 'duration_secs': 0.598867} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.193224] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 47aa2783-367e-4445-8261-7c75eb7561ab/47aa2783-367e-4445-8261-7c75eb7561ab.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.193947] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f97e185-2fba-46dd-b37a-91aca09e34b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.204859] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 941.204859] env[62914]: value = "task-4832190" [ 941.204859] env[62914]: _type = "Task" [ 941.204859] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.215564] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832190, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.630077] env[62914]: DEBUG nova.scheduler.client.report [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.633102] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "refresh_cache-3e6a3787-3e9c-411c-9c3c-305a62061b47" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.633403] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Instance network_info: |[{"id": "27d5fb42-82df-4642-9ddc-5a34ed445dfc", "address": "fa:16:3e:6b:9b:06", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27d5fb42-82", "ovs_interfaceid": "27d5fb42-82df-4642-9ddc-5a34ed445dfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 941.633818] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:9b:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27d5fb42-82df-4642-9ddc-5a34ed445dfc', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.641606] env[62914]: DEBUG oslo.service.loopingcall [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 
tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.642227] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 941.642517] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae995ac8-d147-45f5-b439-ae04cd1f6759 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.667233] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.667233] env[62914]: value = "task-4832191" [ 941.667233] env[62914]: _type = "Task" [ 941.667233] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.692151] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ccb0f-5aba-3965-dced-062e50ec37bd, 'name': SearchDatastore_Task, 'duration_secs': 0.029715} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.692518] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.693301] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.693611] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 13f2a615-aa95-411d-92f8-9ff1b6eba420/13f2a615-aa95-411d-92f8-9ff1b6eba420.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 941.693924] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-992012a6-e50d-4265-95fc-63858da63f07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.704746] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 941.704746] env[62914]: value = "task-4832192" [ 941.704746] env[62914]: _type = "Task" [ 941.704746] env[62914]: } to complete. 
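The inventory dictionary reported for provider f2f7a014-852b-4b37-9610-c5761f4b0175 above contains everything needed to recompute the capacity the scheduler sees: placement treats usable capacity per resource class as (total - reserved) * allocation_ratio. Plugging in the figures from the log:

    # Capacity math for the inventory reported above:
    # capacity = (total - reserved) * allocation_ratio, per resource class.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200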
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.720662] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.724485] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832190, 'name': Rename_Task, 'duration_secs': 0.218143} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.725331] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 941.725331] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12e1ab71-eb7c-461a-a8d8-7e34d308f0e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.733941] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 941.733941] env[62914]: value = "task-4832193" [ 941.733941] env[62914]: _type = "Task" [ 941.733941] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.745493] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.957008] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.957286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.999270] env[62914]: DEBUG nova.compute.manager [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Received event network-changed-27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 941.999480] env[62914]: DEBUG nova.compute.manager [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Refreshing instance network info cache due to event network-changed-27d5fb42-82df-4642-9ddc-5a34ed445dfc. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 941.999731] env[62914]: DEBUG oslo_concurrency.lockutils [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] Acquiring lock "refresh_cache-3e6a3787-3e9c-411c-9c3c-305a62061b47" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.001047] env[62914]: DEBUG oslo_concurrency.lockutils [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] Acquired lock "refresh_cache-3e6a3787-3e9c-411c-9c3c-305a62061b47" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.001047] env[62914]: DEBUG nova.network.neutron [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Refreshing network info cache for port 27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 942.144029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.145145] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Start building networks asynchronously for instance. 
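The network-changed-27d5fb42-... handling above follows a simple shape: take the instance's refresh_cache-<uuid> lock, rebuild the network info for the affected port, store the updated VIF entry, then release the lock. A compact sketch of that shape, again using oslo.concurrency for the named lock; the cache dict and build_port_info callable are placeholders rather than Nova's real objects:

    from oslo_concurrency import lockutils

    # hypothetical stand-in for the instance network info cache
    _network_info_cache = {}

    def refresh_network_cache(instance_uuid, port_id, build_port_info):
        """Refresh the cached network info for one port, as done for a
        network-changed-<port> event."""
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            vif = build_port_info(port_id)                        # assumed callable
            cache = _network_info_cache.setdefault(instance_uuid, {})
            cache[port_id] = vif                                  # "Updated VIF entry ..."
        # lock released here: "Releasing lock refresh_cache-..."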
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 942.147876] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.823s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.148674] env[62914]: DEBUG nova.objects.instance [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lazy-loading 'resources' on Instance uuid 1fb67ac1-c0b7-48b9-8562-d457d46709bc {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.182469] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.219259] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832192, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.245895] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832193, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.413783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.414046] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.414322] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.414549] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.414811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.418478] env[62914]: INFO nova.compute.manager [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Terminating instance [ 942.422404] env[62914]: DEBUG nova.compute.manager [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 942.422584] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 942.423511] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a127a006-5c44-420e-9de4-8445d0239d5d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.439388] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 942.439732] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-155b7c97-e6bc-4287-ac96-506899a181f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.450913] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 942.450913] env[62914]: value = "task-4832194" [ 942.450913] env[62914]: _type = "Task" [ 942.450913] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.462794] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 942.466240] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.652637] env[62914]: DEBUG nova.compute.utils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.656779] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 942.656957] env[62914]: DEBUG nova.network.neutron [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 942.682845] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.722510] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832192, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.748657] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832193, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.750605] env[62914]: DEBUG nova.policy [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 942.973354] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832194, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.979435] env[62914]: DEBUG nova.network.neutron [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Updated VIF entry in instance network info cache for port 27d5fb42-82df-4642-9ddc-5a34ed445dfc. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 942.979792] env[62914]: DEBUG nova.network.neutron [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Updating instance_info_cache with network_info: [{"id": "27d5fb42-82df-4642-9ddc-5a34ed445dfc", "address": "fa:16:3e:6b:9b:06", "network": {"id": "df3aa1a5-c553-4852-8ea3-bb3d4390d2a1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1804925136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1873cee9895d48cb97914fd7ca8392a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27d5fb42-82", "ovs_interfaceid": "27d5fb42-82df-4642-9ddc-5a34ed445dfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.004774] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.099545] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.099818] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.147994] env[62914]: DEBUG nova.network.neutron [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 
tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Successfully created port: 464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.164529] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 943.182255] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.220317] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832192, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.145939} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.220517] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 13f2a615-aa95-411d-92f8-9ff1b6eba420/13f2a615-aa95-411d-92f8-9ff1b6eba420.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 943.220796] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.221015] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6879274-afc7-4eeb-a4ea-65e68faf518b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.229984] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 943.229984] env[62914]: value = "task-4832195" [ 943.229984] env[62914]: _type = "Task" [ 943.229984] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.240392] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.256035] env[62914]: DEBUG oslo_vmware.api [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832193, 'name': PowerOnVM_Task, 'duration_secs': 1.212346} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.256810] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 943.257080] env[62914]: INFO nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Took 12.14 seconds to spawn the instance on the hypervisor. [ 943.257298] env[62914]: DEBUG nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 943.258270] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5438b539-dcc1-4dce-9107-b6cfbae7607f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.307108] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b41d6a-b749-4c1e-80db-a575ae07e1c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.316649] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6b0c5d-3f4e-4509-af6d-9d63bc06bc62 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.352919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4ad223-f968-4f22-aa74-800a2cc15c45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.362269] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02f2354-64aa-4954-abc5-ab74616188d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.379814] env[62914]: DEBUG nova.compute.provider_tree [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.462606] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832194, 'name': PowerOffVM_Task, 'duration_secs': 0.718411} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.462913] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 943.463104] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 943.463409] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25349510-c0ca-4335-bdcc-a1b2ee24e6c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.486145] env[62914]: DEBUG oslo_concurrency.lockutils [req-bb532f6c-9aec-4818-98f1-8623bdfde559 req-17729f03-7191-4e50-8470-49e84921cde9 service nova] Releasing lock "refresh_cache-3e6a3787-3e9c-411c-9c3c-305a62061b47" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.602916] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 943.690882] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.745135] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.307885} completed successfully. 
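Read in order, the task names in this stretch of the log trace the vmwareapi spawn path for an image-backed instance: find the cached image VMDK, copy it into the instance directory, extend it to the flavor's root disk size (1048576 KB, i.e. 1 GiB for root_gb=1), attach it to the VM, rename the VM, and power it on. An outline of that sequence; the driver object and its method names are placeholders standing in for the individual vCenter tasks, not Nova's real API:

    def spawn_from_cached_image(driver, image_id, instance_uuid, root_gb):
        """Outline of the spawn sequence visible in the surrounding log.

        `driver` is a hypothetical helper; each call stands for one task."""
        cached = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"

        driver.copy_virtual_disk(cached, target)                    # CopyVirtualDisk_Task
        driver.extend_virtual_disk(target, root_gb * 1024 * 1024)   # ExtendVirtualDisk_Task (KB)
        driver.attach_disk(instance_uuid, target)                   # ReconfigVM_Task
        driver.rename_vm(instance_uuid)                             # Rename_Task
        driver.power_on(instance_uuid)                              # PowerOnVM_Task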
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.745135] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.745573] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d542e648-974a-4a03-9041-6f643b060735 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.771630] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 13f2a615-aa95-411d-92f8-9ff1b6eba420/13f2a615-aa95-411d-92f8-9ff1b6eba420.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.775515] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e056c014-7cf7-461b-95a7-79a5e3bab2ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.794962] env[62914]: INFO nova.compute.manager [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Took 46.51 seconds to build instance. [ 943.803657] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 943.803657] env[62914]: value = "task-4832197" [ 943.803657] env[62914]: _type = "Task" [ 943.803657] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.817581] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832197, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.883801] env[62914]: DEBUG nova.scheduler.client.report [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 944.129412] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.186211] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 944.188960] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.228616] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 944.228795] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 944.229213] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.229423] env[62914]: DEBUG 
nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 944.229628] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.229753] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 944.229972] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 944.230190] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 944.230370] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 944.230550] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 944.230734] env[62914]: DEBUG nova.virt.hardware [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 944.231829] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4af3c9-cd7b-4ca6-9867-01373fb5899a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.241768] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38463a43-a679-429a-b45b-94c84451339b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.298484] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0658954a-c377-4871-96d1-403dc354c638 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "47aa2783-367e-4445-8261-7c75eb7561ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.021s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.315862] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.327579] env[62914]: DEBUG oslo_concurrency.lockutils [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.327752] env[62914]: DEBUG oslo_concurrency.lockutils [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.327916] env[62914]: DEBUG nova.compute.manager [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 944.328921] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbe37d9-c4cd-44de-822c-86cfb07fccfa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.337918] env[62914]: DEBUG nova.compute.manager [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 944.338629] env[62914]: DEBUG nova.objects.instance [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'flavor' on Instance uuid af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.391426] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.243s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.394704] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.341s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.394704] env[62914]: DEBUG nova.objects.instance [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lazy-loading 'resources' on Instance uuid 29a177e4-b5d7-4249-8fc5-2316f6891536 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.412818] env[62914]: INFO nova.scheduler.client.report [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Deleted allocations for instance 1fb67ac1-c0b7-48b9-8562-d457d46709bc [ 944.686531] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.722423] env[62914]: DEBUG nova.compute.manager [req-067b6e57-6ca8-4649-900a-bfb3c1e38b59 req-50fc44f0-5636-4b17-bff1-2b40983a7a8f service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Received event network-vif-plugged-464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 944.722869] env[62914]: DEBUG oslo_concurrency.lockutils [req-067b6e57-6ca8-4649-900a-bfb3c1e38b59 req-50fc44f0-5636-4b17-bff1-2b40983a7a8f service nova] Acquiring lock "af541b15-19ce-415a-b03e-cb605b780247-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.722949] env[62914]: DEBUG oslo_concurrency.lockutils [req-067b6e57-6ca8-4649-900a-bfb3c1e38b59 req-50fc44f0-5636-4b17-bff1-2b40983a7a8f service nova] Lock "af541b15-19ce-415a-b03e-cb605b780247-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.723092] env[62914]: DEBUG oslo_concurrency.lockutils [req-067b6e57-6ca8-4649-900a-bfb3c1e38b59 req-50fc44f0-5636-4b17-bff1-2b40983a7a8f service nova] Lock "af541b15-19ce-415a-b03e-cb605b780247-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.723278] env[62914]: DEBUG nova.compute.manager [req-067b6e57-6ca8-4649-900a-bfb3c1e38b59 req-50fc44f0-5636-4b17-bff1-2b40983a7a8f service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] No waiting events found dispatching network-vif-plugged-464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 944.723447] env[62914]: WARNING nova.compute.manager [req-067b6e57-6ca8-4649-900a-bfb3c1e38b59 req-50fc44f0-5636-4b17-bff1-2b40983a7a8f service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Received unexpected event network-vif-plugged-464c2387-9349-42aa-bab6-3d349ca0ed26 for instance with vm_state building and task_state spawning. 
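The nova.virt.hardware sequence above walks from the m1.nano flavor's single vCPU to the one admissible topology ("Build topologies for 1 vcpu(s) 1:1:1" … "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"). As a rough illustration only — this is not Nova's actual code — the enumeration it logs can be reproduced by listing every sockets*cores*threads factorisation of the vCPU count that stays within the logged 65536 limits:

# Minimal sketch (not Nova's implementation) of the topology enumeration the
# log above reports from nova/virt/hardware.py: for a given vCPU count, list
# every (sockets, cores, threads) combination whose product equals the vCPU
# count and that stays within the maximum limits. The 65536 defaults simply
# mirror "limits were sockets=65536, cores=65536, threads=65536".
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return all exact factorisations of `vcpus` into sockets*cores*threads."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the 1-vCPU flavor above this yields exactly one topology, matching the log.
print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_topologies(4))   # (1,1,4), (1,2,2), (1,4,1), (2,1,2), ...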
[ 944.799278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "47aa2783-367e-4445-8261-7c75eb7561ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.799593] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "47aa2783-367e-4445-8261-7c75eb7561ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.799816] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "47aa2783-367e-4445-8261-7c75eb7561ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.801357] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "47aa2783-367e-4445-8261-7c75eb7561ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.801357] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "47aa2783-367e-4445-8261-7c75eb7561ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.803056] env[62914]: INFO nova.compute.manager [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Terminating instance [ 944.805348] env[62914]: DEBUG nova.compute.manager [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 944.805550] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 944.809942] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9edabb5-4557-4e05-89bb-bcdfd60ac13f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.814019] env[62914]: DEBUG nova.network.neutron [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Successfully updated port: 464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.824103] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.826805] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 944.827148] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-289b2552-6254-43f3-95e7-e5fcd80b86f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.837032] env[62914]: DEBUG oslo_vmware.api [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 944.837032] env[62914]: value = "task-4832198" [ 944.837032] env[62914]: _type = "Task" [ 944.837032] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.843979] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 944.843979] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3d69cda-54b1-4f9b-821b-aec1ace7efaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.849826] env[62914]: DEBUG oslo_vmware.api [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832198, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.856228] env[62914]: DEBUG oslo_vmware.api [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 944.856228] env[62914]: value = "task-4832199" [ 944.856228] env[62914]: _type = "Task" [ 944.856228] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.871686] env[62914]: DEBUG oslo_vmware.api [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832199, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.922081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7c3a8dd-3512-4903-8c52-6b4e2b0d1e64 tempest-ListServerFiltersTestJSON-403224292 tempest-ListServerFiltersTestJSON-403224292-project-member] Lock "1fb67ac1-c0b7-48b9-8562-d457d46709bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.612s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.180034] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 945.180034] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 945.180034] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore1] 557c0538-fc4a-403a-a9cb-b706e2260b1c {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.180600] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76671179-5aa4-4035-94f4-b11004df9e71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.186831] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832191, 'name': CreateVM_Task, 'duration_secs': 3.513157} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.189447] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 945.190759] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.190759] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.190986] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.191256] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88382d4-2ba6-442f-969a-8bb96a739a90 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.195076] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 945.195076] env[62914]: value = "task-4832200" [ 945.195076] env[62914]: _type = "Task" [ 945.195076] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.199816] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 945.199816] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cef471-16fc-c086-cf7b-5551a7f2b5c8" [ 945.199816] env[62914]: _type = "Task" [ 945.199816] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.211222] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.217325] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cef471-16fc-c086-cf7b-5551a7f2b5c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.314821] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832197, 'name': ReconfigVM_Task, 'duration_secs': 1.363347} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.317751] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 13f2a615-aa95-411d-92f8-9ff1b6eba420/13f2a615-aa95-411d-92f8-9ff1b6eba420.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.318730] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-af541b15-19ce-415a-b03e-cb605b780247" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.318905] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-af541b15-19ce-415a-b03e-cb605b780247" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.319073] env[62914]: DEBUG nova.network.neutron [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.320937] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23877766-9cf9-45b1-af7a-30eec58688b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.330334] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 945.330334] env[62914]: value = "task-4832201" [ 945.330334] env[62914]: _type = "Task" [ 945.330334] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.348083] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832201, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.358592] env[62914]: DEBUG oslo_vmware.api [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832198, 'name': PowerOffVM_Task, 'duration_secs': 0.185943} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.362420] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 945.362739] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 945.364072] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd045031-36ac-4adf-8b59-44f397f25bd0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.366393] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c16e46d-f634-4191-a2d7-780d07ce192d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.377629] env[62914]: DEBUG oslo_vmware.api [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832199, 'name': PowerOffVM_Task, 'duration_secs': 0.267332} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.380500] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 945.380826] env[62914]: DEBUG nova.compute.manager [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 945.382196] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ae7553-fb91-4c76-8731-ee6533c3971f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.385782] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b6e2e4-7561-4078-9d45-19f6a9d36e24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.428205] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa95557c-0505-4f21-ac35-2775f1605c9f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.439772] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7ab089-47a1-40e4-9db7-d4887cff2968 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.455416] 
env[62914]: DEBUG nova.compute.provider_tree [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.470186] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 945.470186] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 945.470186] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleting the datastore file [datastore2] 47aa2783-367e-4445-8261-7c75eb7561ab {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.470186] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a86a505c-fe2e-4d02-9d19-08afd184e335 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.477626] env[62914]: DEBUG oslo_vmware.api [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 945.477626] env[62914]: value = "task-4832203" [ 945.477626] env[62914]: _type = "Task" [ 945.477626] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.488961] env[62914]: DEBUG oslo_vmware.api [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832203, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.712168] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.718575] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cef471-16fc-c086-cf7b-5551a7f2b5c8, 'name': SearchDatastore_Task, 'duration_secs': 0.026478} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.719245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.720024] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.720024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.721098] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.721293] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.721679] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0156dcf0-0569-4228-a422-b1c8d24dfe96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.737488] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.737889] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 945.739037] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f3ebe1-c90f-4683-a0a4-0388739f5297 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.748506] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 945.748506] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fe14f8-bd83-923d-08d8-b8f2440b88ce" [ 945.748506] env[62914]: _type = "Task" [ 945.748506] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.759218] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fe14f8-bd83-923d-08d8-b8f2440b88ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.841041] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832201, 'name': Rename_Task, 'duration_secs': 0.282114} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.842817] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 945.842817] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27ce2063-7435-4678-9832-96990dc0a5d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.850495] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 945.850495] env[62914]: value = "task-4832204" [ 945.850495] env[62914]: _type = "Task" [ 945.850495] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.860654] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.861772] env[62914]: DEBUG nova.network.neutron [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.933937] env[62914]: DEBUG oslo_concurrency.lockutils [None req-602122de-717e-4b94-8ee5-a12e3c544be7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.604s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.960913] env[62914]: DEBUG nova.scheduler.client.report [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.986556] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c276c-a8e8-c6bf-6cdc-565ef3bdf5e1/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 945.988039] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8defd146-0409-48f0-a9ca-0bcde7b501c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.998823] env[62914]: DEBUG oslo_vmware.api [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26071} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.001099] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.001349] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 946.001539] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 946.002013] env[62914]: INFO nova.compute.manager [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Took 1.20 seconds to destroy the instance on the hypervisor. [ 946.002426] env[62914]: DEBUG oslo.service.loopingcall [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.002649] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c276c-a8e8-c6bf-6cdc-565ef3bdf5e1/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 946.002863] env[62914]: ERROR oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c276c-a8e8-c6bf-6cdc-565ef3bdf5e1/disk-0.vmdk due to incomplete transfer. 
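The repeated "Waiting for the task: (returnval){ value = "task-..." }" entries around the PowerOffVM, DeleteDatastoreFile and SearchDatastore calls, followed by "progress is N%" and finally "completed successfully" with a duration, all come from the same polling loop in oslo.vmware's wait_for_task/_poll_task. The following is only a schematic of that control flow, with a hypothetical read_task_info helper standing in for the real vCenter property lookup; it is not the oslo.vmware implementation:

# Schematic of the task-polling pattern visible throughout these entries:
# submit a vCenter task, then poll its state until it reports success or error.
# `read_task_info` is a hypothetical stand-in for the real property lookup.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(task_ref, read_task_info, poll_interval=0.5, timeout=300):
    """Poll `task_ref` until it completes, mirroring the logged
    "progress is N%" / "completed successfully" sequence."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = read_task_info(task_ref)  # hypothetical helper
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(f"{task_ref} failed: {error}")
        # 'queued' or 'running': report progress and keep polling
        print(f"Task {task_ref} progress is {progress}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"{task_ref} did not complete within {timeout}s")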
[ 946.003078] env[62914]: DEBUG nova.compute.manager [-] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 946.003237] env[62914]: DEBUG nova.network.neutron [-] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 946.005438] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-85115213-51c7-48d6-b40a-b3f7338d1fdb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.018023] env[62914]: DEBUG oslo_vmware.rw_handles [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c276c-a8e8-c6bf-6cdc-565ef3bdf5e1/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 946.018023] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Uploaded image 31946e2b-796c-4aa4-b2c0-a1d66b1e72e7 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 946.018904] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 946.019244] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6281a745-cc2a-48e7-8c19-b296a4f1f33d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.027593] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 946.027593] env[62914]: value = "task-4832205" [ 946.027593] env[62914]: _type = "Task" [ 946.027593] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.041963] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832205, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.072183] env[62914]: DEBUG nova.network.neutron [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Updating instance_info_cache with network_info: [{"id": "464c2387-9349-42aa-bab6-3d349ca0ed26", "address": "fa:16:3e:8a:93:7b", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c2387-93", "ovs_interfaceid": "464c2387-9349-42aa-bab6-3d349ca0ed26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.207334] env[62914]: DEBUG oslo_vmware.api [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.553179} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.207627] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.207820] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 946.210518] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 946.210518] env[62914]: INFO nova.compute.manager [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Took 3.79 seconds to destroy the instance on the hypervisor. 
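The instance_info_cache update above carries the full network_info blob for port 464c2387-9349-42aa-bab6-3d349ca0ed26. Below is a small, purely illustrative helper for pulling the useful fields (port id, MAC, fixed IPs, MTU) out of such a structure; the sample data is a trimmed copy of the logged VIF and the walking code is an assumption-free dict traversal, not a Nova API:

# Illustrative reader for the network_info structure Nova logs when it updates
# the instance cache (see the entry above). Sample data trimmed from the log.
network_info = [{
    "id": "464c2387-9349-42aa-bab6-3d349ca0ed26",
    "address": "fa:16:3e:8a:93:7b",
    "network": {
        "label": "tempest-ServersTestJSON-1220500586-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "devname": "tap464c2387-93",
    "active": True,
}]

def summarize_vifs(nw_info):
    """Yield (port_id, mac, fixed_ips, mtu) for each VIF in a network_info list."""
    for vif in nw_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        yield vif["id"], vif["address"], ips, vif["network"]["meta"].get("mtu")

for port_id, mac, ips, mtu in summarize_vifs(network_info):
    print(f"port {port_id} mac {mac} ips {ips} mtu {mtu}")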
[ 946.210518] env[62914]: DEBUG oslo.service.loopingcall [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.210518] env[62914]: DEBUG nova.compute.manager [-] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 946.210518] env[62914]: DEBUG nova.network.neutron [-] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 946.263496] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fe14f8-bd83-923d-08d8-b8f2440b88ce, 'name': SearchDatastore_Task, 'duration_secs': 0.023499} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.267025] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7001677-03a0-46a1-95db-e149fa47f476 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.272529] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 946.272529] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5285aa22-e816-ef26-eb5c-f84fe9684f8d" [ 946.272529] env[62914]: _type = "Task" [ 946.272529] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.282482] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5285aa22-e816-ef26-eb5c-f84fe9684f8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.370813] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832204, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.468589] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.472427] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.536s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.473677] env[62914]: INFO nova.compute.claims [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.515418] env[62914]: INFO nova.scheduler.client.report [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Deleted allocations for instance 29a177e4-b5d7-4249-8fc5-2316f6891536 [ 946.523218] env[62914]: DEBUG nova.compute.manager [req-848323ad-6997-4288-b11a-f3e894f6de52 req-11a44205-488e-4d62-9d1d-90c8ed1182a0 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Received event network-vif-deleted-8074d5d3-e32f-45b2-94c7-d1c4105fb78d {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 946.523575] env[62914]: INFO nova.compute.manager [req-848323ad-6997-4288-b11a-f3e894f6de52 req-11a44205-488e-4d62-9d1d-90c8ed1182a0 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Neutron deleted interface 8074d5d3-e32f-45b2-94c7-d1c4105fb78d; detaching it from the instance and deleting it from the info cache [ 946.523801] env[62914]: DEBUG nova.network.neutron [req-848323ad-6997-4288-b11a-f3e894f6de52 req-11a44205-488e-4d62-9d1d-90c8ed1182a0 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.543435] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832205, 'name': Destroy_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.577372] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-af541b15-19ce-415a-b03e-cb605b780247" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.577372] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Instance network_info: |[{"id": "464c2387-9349-42aa-bab6-3d349ca0ed26", "address": "fa:16:3e:8a:93:7b", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c2387-93", "ovs_interfaceid": "464c2387-9349-42aa-bab6-3d349ca0ed26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 946.577372] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:93:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '464c2387-9349-42aa-bab6-3d349ca0ed26', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.591108] env[62914]: DEBUG oslo.service.loopingcall [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.592380] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af541b15-19ce-415a-b03e-cb605b780247] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 946.593740] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0789cc4e-29fc-481e-9fc3-5cf83564c6b5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.621656] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.621656] env[62914]: value = "task-4832206" [ 946.621656] env[62914]: _type = "Task" [ 946.621656] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.632699] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832206, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.784270] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5285aa22-e816-ef26-eb5c-f84fe9684f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.029658} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.784728] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.785169] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 3e6a3787-3e9c-411c-9c3c-305a62061b47/3e6a3787-3e9c-411c-9c3c-305a62061b47.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 946.785592] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bdeb633-0137-4632-817d-69aeb6ddd4e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.795145] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 946.795145] env[62914]: value = "task-4832207" [ 946.795145] env[62914]: _type = "Task" [ 946.795145] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.804988] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.849189] env[62914]: DEBUG nova.objects.instance [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'flavor' on Instance uuid af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.867474] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832204, 'name': PowerOnVM_Task, 'duration_secs': 0.87785} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.869850] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 946.870303] env[62914]: INFO nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Took 10.84 seconds to spawn the instance on the hypervisor. [ 946.874023] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 946.874023] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb188db7-3711-4aa1-8a44-d42fae2cd2e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.904674] env[62914]: DEBUG nova.compute.manager [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Received event network-changed-464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 946.904920] env[62914]: DEBUG nova.compute.manager [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Refreshing instance network info cache due to event network-changed-464c2387-9349-42aa-bab6-3d349ca0ed26. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 946.905137] env[62914]: DEBUG oslo_concurrency.lockutils [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] Acquiring lock "refresh_cache-af541b15-19ce-415a-b03e-cb605b780247" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.905293] env[62914]: DEBUG oslo_concurrency.lockutils [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] Acquired lock "refresh_cache-af541b15-19ce-415a-b03e-cb605b780247" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.905462] env[62914]: DEBUG nova.network.neutron [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Refreshing network info cache for port 464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 946.961044] env[62914]: DEBUG nova.network.neutron [-] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.024526] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0807f5ee-af07-461c-89f3-94984780e4be tempest-SecurityGroupsTestJSON-92077384 tempest-SecurityGroupsTestJSON-92077384-project-member] Lock "29a177e4-b5d7-4249-8fc5-2316f6891536" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 29.539s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.026070] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed68211d-3df5-43a6-aaf4-5ce07a468f36 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.041775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0edd66-24a2-4805-9642-a00b1ec892e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.058363] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832205, 'name': Destroy_Task, 'duration_secs': 0.681682} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.059369] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Destroyed the VM [ 947.060356] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 947.060549] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5076da5d-4040-4c4f-b0eb-f71ac40c37a9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.069673] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 947.069673] env[62914]: value = "task-4832208" [ 947.069673] env[62914]: _type = "Task" [ 947.069673] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.088354] env[62914]: DEBUG nova.compute.manager [req-848323ad-6997-4288-b11a-f3e894f6de52 req-11a44205-488e-4d62-9d1d-90c8ed1182a0 service nova] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Detach interface failed, port_id=8074d5d3-e32f-45b2-94c7-d1c4105fb78d, reason: Instance 47aa2783-367e-4445-8261-7c75eb7561ab could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 947.095272] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832208, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.116388] env[62914]: DEBUG nova.network.neutron [-] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.136622] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832206, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.308837] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832207, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.356279] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.356622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.356884] env[62914]: DEBUG nova.network.neutron [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 947.357098] env[62914]: DEBUG nova.objects.instance [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'info_cache' on Instance uuid af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.393253] env[62914]: INFO nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Took 46.38 seconds to build instance. [ 947.464046] env[62914]: INFO nova.compute.manager [-] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Took 1.46 seconds to deallocate network for instance. [ 947.584387] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832208, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.621384] env[62914]: INFO nova.compute.manager [-] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Took 1.41 seconds to deallocate network for instance. [ 947.640460] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832206, 'name': CreateVM_Task, 'duration_secs': 0.695675} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.641537] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af541b15-19ce-415a-b03e-cb605b780247] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 947.641537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.641773] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.642109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.642449] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f700d23e-8751-4100-9ca5-eaca500be396 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.649679] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 947.649679] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52224bd5-8fed-13d8-56af-611014697590" [ 947.649679] env[62914]: _type = "Task" [ 947.649679] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.663327] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52224bd5-8fed-13d8-56af-611014697590, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.771308] env[62914]: DEBUG nova.network.neutron [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Updated VIF entry in instance network info cache for port 464c2387-9349-42aa-bab6-3d349ca0ed26. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 947.771308] env[62914]: DEBUG nova.network.neutron [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Updating instance_info_cache with network_info: [{"id": "464c2387-9349-42aa-bab6-3d349ca0ed26", "address": "fa:16:3e:8a:93:7b", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c2387-93", "ovs_interfaceid": "464c2387-9349-42aa-bab6-3d349ca0ed26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.812378] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80603} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.812696] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 3e6a3787-3e9c-411c-9c3c-305a62061b47/3e6a3787-3e9c-411c-9c3c-305a62061b47.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 947.812917] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.813207] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bcfed0d-6db5-4f1e-ae20-9b98954dec41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.821394] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 947.821394] env[62914]: value = "task-4832209" [ 947.821394] env[62914]: _type = "Task" [ 947.821394] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.838120] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832209, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.863926] env[62914]: DEBUG nova.objects.base [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 947.865216] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.865470] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.895132] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 47.891s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.975039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.083699] env[62914]: DEBUG oslo_vmware.api [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832208, 'name': RemoveSnapshot_Task, 'duration_secs': 0.801174} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.083995] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 948.084236] env[62914]: INFO nova.compute.manager [None req-469b8e67-6409-48a0-926e-ed705aaab651 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Took 14.80 seconds to snapshot the instance on the hypervisor. [ 948.091173] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c856ed93-f682-4ec9-9838-bec84704c056 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.099757] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9f05be-9064-43e9-b7f6-00b4f349c0de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.923728] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.925108] env[62914]: DEBUG oslo_concurrency.lockutils [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] Releasing lock "refresh_cache-af541b15-19ce-415a-b03e-cb605b780247" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.925403] env[62914]: DEBUG nova.compute.manager [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Received event network-vif-deleted-e7fd222f-0127-4616-8d16-801a8c35a0d1 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 948.925499] env[62914]: INFO nova.compute.manager [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Neutron deleted interface e7fd222f-0127-4616-8d16-801a8c35a0d1; detaching it from the instance and deleting it from the info cache [ 948.925667] env[62914]: DEBUG nova.network.neutron [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.928835] env[62914]: DEBUG nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 948.941336] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1037cce6-2131-4849-9a61-cc36f0f37122 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.952382] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126017} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.958679] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 948.959554] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52224bd5-8fed-13d8-56af-611014697590, 'name': SearchDatastore_Task, 'duration_secs': 0.016047} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.960583] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364cc0c0-39b1-46a1-b534-56f520b0e33a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.963437] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.963735] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.964844] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.964844] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.964844] env[62914]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.966082] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66dadc77-3059-4143-a047-af42512fdd39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.971025] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e2ef0d5-6dc9-40f8-a629-74c7595c7385 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.995885] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 3e6a3787-3e9c-411c-9c3c-305a62061b47/3e6a3787-3e9c-411c-9c3c-305a62061b47.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.008088] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-891a4b89-0379-415d-9e06-628c0d6c6391 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.022655] env[62914]: DEBUG nova.compute.provider_tree [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.025922] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.025922] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 949.025922] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fb0792d-6254-4cc6-9a3d-e8c45c09c894 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.033400] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 949.033400] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524e65a4-6d57-ce53-b7b4-d93c775b6cee" [ 949.033400] env[62914]: _type = "Task" [ 949.033400] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.038568] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 949.038568] env[62914]: value = "task-4832210" [ 949.038568] env[62914]: _type = "Task" [ 949.038568] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.047079] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524e65a4-6d57-ce53-b7b4-d93c775b6cee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.056096] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832210, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.437350] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a4cf8e5-80fc-4cd9-b0ed-829023bcb580 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.448184] env[62914]: DEBUG nova.network.neutron [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.453833] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9674b52a-1efc-4199-9a78-6771e56cc36d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.470094] env[62914]: 
DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.501854] env[62914]: DEBUG nova.compute.manager [req-f51b8c69-e567-4cd5-8bbd-d06fb3c12a14 req-38caa7c2-d5af-44d1-a898-d139200313a1 service nova] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Detach interface failed, port_id=e7fd222f-0127-4616-8d16-801a8c35a0d1, reason: Instance 557c0538-fc4a-403a-a9cb-b706e2260b1c could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 949.530055] env[62914]: DEBUG nova.scheduler.client.report [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 949.546367] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524e65a4-6d57-ce53-b7b4-d93c775b6cee, 'name': SearchDatastore_Task, 'duration_secs': 0.018776} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.550836] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f87efd8-769d-4370-b9cc-e2f279b89569 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.560763] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832210, 'name': ReconfigVM_Task, 'duration_secs': 0.498556} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.562539] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 3e6a3787-3e9c-411c-9c3c-305a62061b47/3e6a3787-3e9c-411c-9c3c-305a62061b47.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.563278] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 949.563278] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5212f9b9-830c-3af5-d8b0-4e3684b12f8b" [ 949.563278] env[62914]: _type = "Task" [ 949.563278] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.563845] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-284ca31e-f168-4a9c-a727-d6461b9911d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.576780] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5212f9b9-830c-3af5-d8b0-4e3684b12f8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.578823] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 949.578823] env[62914]: value = "task-4832211" [ 949.578823] env[62914]: _type = "Task" [ 949.578823] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.589338] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832211, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.951581] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.039882] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.568s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.040025] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 950.042954] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.059s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.043268] env[62914]: DEBUG nova.objects.instance [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'resources' on Instance uuid 83de3d7c-2308-4678-ae90-a30705f6a8c4 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.077656] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5212f9b9-830c-3af5-d8b0-4e3684b12f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.019516} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.077977] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.078760] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] af541b15-19ce-415a-b03e-cb605b780247/af541b15-19ce-415a-b03e-cb605b780247.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 950.078760] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f160e64-92ed-4dd7-b2e7-4b44ddfee308 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.093552] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832211, 'name': Rename_Task, 'duration_secs': 0.23546} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.095326] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 950.096082] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 950.096082] env[62914]: value = "task-4832212" [ 950.096082] env[62914]: _type = "Task" [ 950.096082] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.096300] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f27eef3-fe85-4e2b-b004-3b8055441673 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.108892] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.110539] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 950.110539] env[62914]: value = "task-4832213" [ 950.110539] env[62914]: _type = "Task" [ 950.110539] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.122138] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832213, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.457641] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 950.457984] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05547617-101c-4c34-b905-e23d8d498c04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.467428] env[62914]: DEBUG oslo_vmware.api [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 950.467428] env[62914]: value = "task-4832214" [ 950.467428] env[62914]: _type = "Task" [ 950.467428] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.479203] env[62914]: DEBUG oslo_vmware.api [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.546591] env[62914]: DEBUG nova.compute.utils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 950.548215] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 950.548414] env[62914]: DEBUG nova.network.neutron [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 950.623463] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832212, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.624858] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832213, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.883670] env[62914]: DEBUG nova.policy [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fb02b5af55441b0b788b739fc8dc623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5adc4dc554ed4fe69f214161fd8ab9b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 950.984706] env[62914]: DEBUG oslo_vmware.api [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832214, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.056198] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 951.119124] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.815366} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.125620] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] af541b15-19ce-415a-b03e-cb605b780247/af541b15-19ce-415a-b03e-cb605b780247.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 951.125620] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.125929] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a92cd1-e0f6-46c0-8559-8887c3d73bcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.130021] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f5150da-ed9a-49c1-855e-4a2196367cfc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.137952] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832213, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.141256] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 951.141256] env[62914]: value = "task-4832215" [ 951.141256] env[62914]: _type = "Task" [ 951.141256] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.142327] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044cb078-c77e-4991-b84c-aab2c10a209a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.185663] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101e099a-49bd-4afe-b8a5-4c4417f4fcb0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.195625] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78efd009-c891-4ffe-87d8-eb4c3f1274df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.213091] env[62914]: DEBUG nova.compute.provider_tree [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.397056] env[62914]: DEBUG nova.network.neutron [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Successfully created port: 8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.483865] env[62914]: DEBUG oslo_vmware.api [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832214, 'name': PowerOnVM_Task, 'duration_secs': 0.639284} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.484185] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 951.484374] env[62914]: DEBUG nova.compute.manager [None req-9c8d9d1c-7e7e-447c-9a5e-4e443d7f33b5 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 951.485241] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415cf378-9462-400e-b4bf-42232756eb84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.629593] env[62914]: DEBUG oslo_vmware.api [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832213, 'name': PowerOnVM_Task, 'duration_secs': 1.129942} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.630435] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 951.630435] env[62914]: INFO nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Took 12.95 seconds to spawn the instance on the hypervisor. [ 951.630435] env[62914]: DEBUG nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 951.631145] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df825565-3895-4b7f-8623-72411d4ae1cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.665598] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203609} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.666053] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.668051] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845cc518-494f-4da9-b840-1e08122f1291 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.701055] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] af541b15-19ce-415a-b03e-cb605b780247/af541b15-19ce-415a-b03e-cb605b780247.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.701921] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ace5547-c3a9-41f6-82c5-9042e2abf8eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.720662] env[62914]: DEBUG nova.scheduler.client.report [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.732086] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 951.732086] env[62914]: value = "task-4832216" [ 951.732086] env[62914]: _type = "Task" [ 951.732086] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.748718] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832216, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.080824] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 952.113541] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.113541] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.113541] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.113541] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.113541] 
env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.113541] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.114361] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.114684] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.115034] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 952.115348] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.115640] env[62914]: DEBUG nova.virt.hardware [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.116635] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f1c803-c52d-4859-b966-7a8172e1b25d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.126236] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c664ef-8519-4006-b417-e09d5161c021 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.154749] env[62914]: INFO nova.compute.manager [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Took 48.68 seconds to build instance. 
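Editor's note: the inventory records above report, per resource class, a total, a reserved amount and an allocation_ratio for provider f2f7a014-852b-4b37-9610-c5761f4b0175. A minimal sketch (plain Python, no Nova imports) of how those fields translate into schedulable capacity follows; the formula capacity = (total - reserved) * allocation_ratio matches the semantics Placement applies to these fields, but the helper itself is illustrative only.

    # Values copied from the inventory data logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        """Capacity the scheduler can place against, per resource class."""
        return {
            rc: int((data['total'] - data['reserved']) * data['allocation_ratio'])
            for rc, data in inv.items()
        }

    print(schedulable_capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 200}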
[ 952.226582] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.184s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.232577] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.238s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.232901] env[62914]: DEBUG nova.objects.instance [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lazy-loading 'resources' on Instance uuid 43227b1e-c90a-47d0-a4f5-fd0af0826e94 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.245625] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832216, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.270030] env[62914]: INFO nova.scheduler.client.report [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocations for instance 83de3d7c-2308-4678-ae90-a30705f6a8c4 [ 952.658108] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7081fb30-9229-4cbe-84ff-3b560deccf53 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.616s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.698681] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.699087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.699238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.699451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.699636] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.704445] env[62914]: INFO nova.compute.manager [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Terminating instance [ 952.707611] env[62914]: DEBUG nova.compute.manager [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 952.707988] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 952.708861] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42872b5-de33-4fe6-9a93-b6a2ec5537b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.719149] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 952.719555] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30c86638-f40e-41d1-8446-715cdec22567 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.731397] env[62914]: DEBUG oslo_vmware.api [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 952.731397] env[62914]: value = "task-4832217" [ 952.731397] env[62914]: _type = "Task" [ 952.731397] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.755922] env[62914]: DEBUG oslo_vmware.api [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.761400] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832216, 'name': ReconfigVM_Task, 'duration_secs': 1.008624} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.762150] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Reconfigured VM instance instance-00000054 to attach disk [datastore1] af541b15-19ce-415a-b03e-cb605b780247/af541b15-19ce-415a-b03e-cb605b780247.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.767134] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f59cabb-8fd3-4326-b0bb-ee60ab56dd67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.777991] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 952.777991] env[62914]: value = "task-4832218" [ 952.777991] env[62914]: _type = "Task" [ 952.777991] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.779425] env[62914]: DEBUG oslo_concurrency.lockutils [None req-632b3ef9-2be8-4c4a-8d27-3cf992827c9f tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "83de3d7c-2308-4678-ae90-a30705f6a8c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.684s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.797466] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832218, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.256159] env[62914]: DEBUG oslo_vmware.api [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832217, 'name': PowerOffVM_Task, 'duration_secs': 0.314566} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.256159] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 953.256624] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.257304] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e0b5670-7111-46fb-ba82-d67373eaab39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.298511] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832218, 'name': Rename_Task, 'duration_secs': 0.292478} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.298511] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 953.298511] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5dd759c5-f388-4211-9002-2eabe69ed236 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.308972] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 953.308972] env[62914]: value = "task-4832220" [ 953.308972] env[62914]: _type = "Task" [ 953.308972] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.318967] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832220, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.347024] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.347024] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.347024] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Deleting the datastore file [datastore2] ea06d3c3-d836-4e66-ac66-42f9886cd5de {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.347024] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a088841-bb77-4d83-8799-708d48f46be8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.355038] env[62914]: DEBUG oslo_vmware.api [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 953.355038] env[62914]: value = "task-4832221" [ 953.355038] env[62914]: _type = "Task" [ 953.355038] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.362321] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fc3a00-af9b-464a-8801-eba7474001f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.372633] env[62914]: DEBUG oslo_vmware.api [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832221, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.373683] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2547e265-07ab-41e9-86e7-4bef5dbe225b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.412156] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1a3726-6922-4c00-a80f-365d53fd5a00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.415884] env[62914]: DEBUG nova.compute.manager [req-c78cbbd3-6f71-470a-a27e-ae3a3eebbe15 req-942a04b2-1de0-45df-a088-2d618ed6c7d6 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Received event network-vif-plugged-8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 953.416819] env[62914]: DEBUG oslo_concurrency.lockutils [req-c78cbbd3-6f71-470a-a27e-ae3a3eebbe15 req-942a04b2-1de0-45df-a088-2d618ed6c7d6 service nova] Acquiring lock "d9476d24-fbc5-4e30-bf67-85c388e943fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.417151] env[62914]: DEBUG oslo_concurrency.lockutils [req-c78cbbd3-6f71-470a-a27e-ae3a3eebbe15 req-942a04b2-1de0-45df-a088-2d618ed6c7d6 service nova] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.417399] env[62914]: DEBUG oslo_concurrency.lockutils [req-c78cbbd3-6f71-470a-a27e-ae3a3eebbe15 req-942a04b2-1de0-45df-a088-2d618ed6c7d6 service nova] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.417646] env[62914]: DEBUG nova.compute.manager [req-c78cbbd3-6f71-470a-a27e-ae3a3eebbe15 req-942a04b2-1de0-45df-a088-2d618ed6c7d6 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] No waiting events found dispatching network-vif-plugged-8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 953.417886] env[62914]: WARNING nova.compute.manager [req-c78cbbd3-6f71-470a-a27e-ae3a3eebbe15 req-942a04b2-1de0-45df-a088-2d618ed6c7d6 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Received unexpected event network-vif-plugged-8963aef9-1731-4bd4-b659-83eb9724f8f9 for instance with vm_state building and task_state spawning. 
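Editor's note: the repeated "Task: {'id': 'task-48322xx', 'name': ...} progress is N%" records above come from oslo.vmware's wait_for_task/_poll_task loop, which polls a vCenter task handle until it reports success and then logs duration_secs. A hedged sketch of that poll-until-complete pattern is below; get_task_info is a hypothetical callable standing in for the vSphere property read, and this is not the library's real implementation.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task handle until it finishes; return elapsed seconds."""
        start = time.monotonic()
        while True:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 66}
            if info['state'] == 'success':
                return time.monotonic() - start  # logged as duration_secs above
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)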
[ 953.429751] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfe4be6-7e7a-42eb-a6cc-965179d8827b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.446074] env[62914]: DEBUG nova.compute.provider_tree [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.478114] env[62914]: DEBUG nova.network.neutron [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Successfully updated port: 8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.494042] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "13f2a615-aa95-411d-92f8-9ff1b6eba420" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.494324] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.494761] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "13f2a615-aa95-411d-92f8-9ff1b6eba420-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.494761] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.494879] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.498266] env[62914]: INFO nova.compute.manager [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 
13f2a615-aa95-411d-92f8-9ff1b6eba420] Terminating instance [ 953.500141] env[62914]: DEBUG nova.compute.manager [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 953.500355] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.501401] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a737df-3acf-496f-a18b-d0bab0cf8415 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.510900] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 953.511091] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-152a6bd4-c76f-49a2-b7da-9e292d440696 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.519352] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 953.519352] env[62914]: value = "task-4832222" [ 953.519352] env[62914]: _type = "Task" [ 953.519352] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.529277] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832222, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.715187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "3e6a3787-3e9c-411c-9c3c-305a62061b47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.715496] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.715730] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "3e6a3787-3e9c-411c-9c3c-305a62061b47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.715938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.716160] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.718458] env[62914]: INFO nova.compute.manager [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Terminating instance [ 953.720533] env[62914]: DEBUG nova.compute.manager [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 953.720751] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.721713] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34614091-3203-4225-be65-68f8804ff6f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.730517] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 953.730861] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6504f40-67f5-403b-b2ed-88a4055c1b50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.738668] env[62914]: DEBUG oslo_vmware.api [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 953.738668] env[62914]: value = "task-4832223" [ 953.738668] env[62914]: _type = "Task" [ 953.738668] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.749667] env[62914]: DEBUG oslo_vmware.api [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.827658] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832220, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.872183] env[62914]: DEBUG oslo_vmware.api [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273264} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.872711] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.873141] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 953.873527] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 953.873888] env[62914]: INFO nova.compute.manager [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Took 1.17 seconds to destroy the instance on the hypervisor. [ 953.875556] env[62914]: DEBUG oslo.service.loopingcall [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.875556] env[62914]: DEBUG nova.compute.manager [-] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 953.875556] env[62914]: DEBUG nova.network.neutron [-] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 953.950298] env[62914]: DEBUG nova.scheduler.client.report [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 953.985020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.985020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.985020] env[62914]: DEBUG nova.network.neutron [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 954.035732] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832222, 'name': PowerOffVM_Task, 'duration_secs': 0.315001} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.035732] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 954.035732] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 954.035732] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-907fd3d4-cc08-41ae-a3c4-eb64db5a5d82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.119032] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 954.119373] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 954.119567] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleting the datastore file [datastore1] 13f2a615-aa95-411d-92f8-9ff1b6eba420 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.119894] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00baa490-6737-48d0-8483-865156b25dfa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.129604] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 954.129604] env[62914]: value = "task-4832225" [ 954.129604] env[62914]: _type = "Task" [ 954.129604] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.148126] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832225, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.257465] env[62914]: DEBUG oslo_vmware.api [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832223, 'name': PowerOffVM_Task, 'duration_secs': 0.356356} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.257465] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 954.257465] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 954.257465] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f9685be-5b73-4818-bf7b-88e6d5fc0a78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.328158] env[62914]: DEBUG oslo_vmware.api [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832220, 'name': PowerOnVM_Task, 'duration_secs': 0.864994} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.328480] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 954.328690] env[62914]: INFO nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Took 10.14 seconds to spawn the instance on the hypervisor. 
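Editor's note: the "Acquiring lock ... / Lock ... acquired ... waited / released ... held" records throughout this section are emitted by oslo_concurrency.lockutils around named in-process locks such as "compute_resources" and "refresh_cache-<uuid>". A minimal sketch of taking such locks in service code follows; the lock names are taken from the log, but the function bodies are placeholders, not Nova code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_example():
        # Serialized with every other caller that takes the same lock name,
        # which is why the log reports how long each caller waited and held it.
        pass

    def refresh_cache_example(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # Held while the instance's network info cache is rebuilt.
            pass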
[ 954.329478] env[62914]: DEBUG nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 954.329931] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2153872e-93b2-435c-ab63-be52f288c2dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.458237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.226s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.462444] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.950s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.462643] env[62914]: DEBUG nova.objects.instance [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'pci_requests' on Instance uuid dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.497747] env[62914]: INFO nova.scheduler.client.report [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted allocations for instance 43227b1e-c90a-47d0-a4f5-fd0af0826e94 [ 954.550508] env[62914]: DEBUG nova.network.neutron [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.582037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 954.582037] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 954.582284] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleting the datastore file [datastore1] 3e6a3787-3e9c-411c-9c3c-305a62061b47 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.582894] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acc16a09-0074-438f-9fc5-1db76ec50e53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.590364] env[62914]: DEBUG oslo_vmware.api [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for the task: (returnval){ [ 954.590364] env[62914]: value = "task-4832227" [ 954.590364] env[62914]: _type = "Task" [ 954.590364] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.600896] env[62914]: DEBUG oslo_vmware.api [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832227, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.640486] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832225, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.774563] env[62914]: DEBUG nova.network.neutron [-] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.803844] env[62914]: DEBUG nova.network.neutron [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Updating instance_info_cache with network_info: [{"id": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "address": "fa:16:3e:9d:6c:cb", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8963aef9-17", "ovs_interfaceid": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.854063] env[62914]: INFO nova.compute.manager [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Took 48.14 seconds to build instance. [ 954.971368] env[62914]: DEBUG nova.objects.instance [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'numa_topology' on Instance uuid dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.011438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63b61dbf-2437-45ab-a9d2-d21e4f5ae607 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "43227b1e-c90a-47d0-a4f5-fd0af0826e94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.282s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.104512] env[62914]: DEBUG oslo_vmware.api [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832227, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.457021} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.104903] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.105213] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 955.105479] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 955.105741] env[62914]: INFO nova.compute.manager [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Took 1.38 seconds to destroy the instance on the hypervisor. [ 955.106150] env[62914]: DEBUG oslo.service.loopingcall [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.106443] env[62914]: DEBUG nova.compute.manager [-] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 955.106575] env[62914]: DEBUG nova.network.neutron [-] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.145258] env[62914]: DEBUG oslo_vmware.api [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Task: {'id': task-4832225, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.703943} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.145258] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.145258] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 955.145627] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 955.146026] env[62914]: INFO nova.compute.manager [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Took 1.65 seconds to destroy the instance on the hypervisor. [ 955.148240] env[62914]: DEBUG oslo.service.loopingcall [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.148240] env[62914]: DEBUG nova.compute.manager [-] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 955.148240] env[62914]: DEBUG nova.network.neutron [-] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.279958] env[62914]: INFO nova.compute.manager [-] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Took 1.40 seconds to deallocate network for instance. 
[ 955.309667] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.309980] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Instance network_info: |[{"id": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "address": "fa:16:3e:9d:6c:cb", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8963aef9-17", "ovs_interfaceid": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 955.310799] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:6c:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8963aef9-1731-4bd4-b659-83eb9724f8f9', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.323259] env[62914]: DEBUG oslo.service.loopingcall [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.323259] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 955.323259] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1435b64-cd2b-4ebf-b4ba-5d64acbf535f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.348039] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.348039] env[62914]: value = "task-4832228" [ 955.348039] env[62914]: _type = "Task" [ 955.348039] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.357270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec723d2-11d3-4693-b18e-11ee12e16a65 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "af541b15-19ce-415a-b03e-cb605b780247" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.237s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.358030] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.475482] env[62914]: INFO nova.compute.claims [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.711465] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "af541b15-19ce-415a-b03e-cb605b780247" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.711786] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "af541b15-19ce-415a-b03e-cb605b780247" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.712031] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "af541b15-19ce-415a-b03e-cb605b780247-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.712236] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "af541b15-19ce-415a-b03e-cb605b780247-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.712420] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "af541b15-19ce-415a-b03e-cb605b780247-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.714886] env[62914]: INFO nova.compute.manager [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Terminating instance [ 955.717042] env[62914]: DEBUG nova.compute.manager [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 955.717318] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 955.718227] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580c6d19-a56a-497e-a958-835d073fad4b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.727155] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 955.727457] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d58c6a0-000a-4fbb-8bb4-4730d6cddea4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.736050] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 955.736050] env[62914]: value = "task-4832229" [ 955.736050] env[62914]: _type = "Task" [ 955.736050] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.746259] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832229, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.789407] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.811022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "b77a3d27-fe9f-49fc-95d1-15fe82762833" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.811022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.811022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "b77a3d27-fe9f-49fc-95d1-15fe82762833-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.811022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.811022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.813137] env[62914]: INFO nova.compute.manager [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Terminating instance [ 955.820527] env[62914]: DEBUG nova.compute.manager [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 955.820527] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 955.821493] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75858ea1-6c70-469e-b99e-ef6881118b42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.830375] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 955.830375] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d10ca582-f253-4d84-91b1-84fd86294763 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.839675] env[62914]: DEBUG oslo_vmware.api [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 955.839675] env[62914]: value = "task-4832230" [ 955.839675] env[62914]: _type = "Task" [ 955.839675] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.852375] env[62914]: DEBUG oslo_vmware.api [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.862070] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.956050] env[62914]: DEBUG nova.network.neutron [-] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.109444] env[62914]: DEBUG nova.network.neutron [-] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.196632] env[62914]: DEBUG nova.compute.manager [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Received event network-changed-8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 956.196632] env[62914]: DEBUG nova.compute.manager [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Refreshing instance network info cache due to event network-changed-8963aef9-1731-4bd4-b659-83eb9724f8f9. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 956.196942] env[62914]: DEBUG oslo_concurrency.lockutils [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] Acquiring lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.197150] env[62914]: DEBUG oslo_concurrency.lockutils [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] Acquired lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.197335] env[62914]: DEBUG nova.network.neutron [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Refreshing network info cache for port 8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 956.248418] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.352390] env[62914]: DEBUG oslo_vmware.api [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832230, 'name': PowerOffVM_Task, 'duration_secs': 0.254578} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.352661] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 956.352795] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 956.356339] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-042ba12b-f7ff-4a3d-bac4-b77ac51963f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.366433] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.424574] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 956.424574] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 956.424792] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleting the datastore file [datastore2] b77a3d27-fe9f-49fc-95d1-15fe82762833 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.424887] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b565d3d6-f4bb-4ec9-858f-5d0e3e59e376 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.433033] env[62914]: DEBUG oslo_vmware.api [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 956.433033] env[62914]: value = "task-4832232" [ 956.433033] env[62914]: _type = "Task" [ 956.433033] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.446034] env[62914]: DEBUG oslo_vmware.api [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.448969] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.448969] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.450843] env[62914]: DEBUG nova.compute.manager [req-990efbe9-6889-4858-b376-c86bc21855ec req-e8e7be2e-3944-4f24-be3a-da36c8f34e4e service nova] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Received event network-vif-deleted-576d897f-ee4d-4cda-bd9b-c835b51d2113 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 956.459158] env[62914]: INFO nova.compute.manager [-] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Took 1.35 seconds to deallocate network for instance. [ 956.613549] env[62914]: INFO nova.compute.manager [-] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Took 1.47 seconds to deallocate network for instance. [ 956.637547] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "60169fa7-3266-4105-b17b-f3677ed2c443" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.639068] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.752753] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832229, 'name': PowerOffVM_Task, 'duration_secs': 0.905355} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.753119] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 956.753296] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 956.753589] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ab527d1-b538-4b3a-9e0b-8c3124dd2167 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.870058] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.947656] env[62914]: DEBUG oslo_vmware.api [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279347} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.947656] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 956.947656] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 956.947656] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 956.947656] env[62914]: INFO nova.compute.manager [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Took 1.13 seconds to destroy the instance on the hypervisor. [ 956.948881] env[62914]: DEBUG oslo.service.loopingcall [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.951576] env[62914]: DEBUG nova.compute.manager [-] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 956.951903] env[62914]: DEBUG nova.network.neutron [-] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 956.954624] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 956.966801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.096159] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "79c7728a-0452-44ec-91de-62e3f09f9183" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.096404] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "79c7728a-0452-44ec-91de-62e3f09f9183" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.112765] env[62914]: DEBUG nova.network.neutron [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Updated VIF entry in instance network info cache for port 8963aef9-1731-4bd4-b659-83eb9724f8f9. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 957.113197] env[62914]: DEBUG nova.network.neutron [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Updating instance_info_cache with network_info: [{"id": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "address": "fa:16:3e:9d:6c:cb", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8963aef9-17", "ovs_interfaceid": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.116511] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e19a4f-30df-4ab7-9d68-d3e47cde6b67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.123957] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.127526] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c680519f-1eee-4819-a1f2-c9b834e8ac57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.158188] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 957.164024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423a6834-ec6b-40e0-9275-73a0fda9890e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.171074] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20e1a9e-eb45-4946-85be-c52eedec7306 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.186433] env[62914]: DEBUG nova.compute.provider_tree [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.364689] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.398918] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 957.399244] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 957.399460] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore1] af541b15-19ce-415a-b03e-cb605b780247 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.399748] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09b3f3de-1692-4877-8fc2-dc9b8be5008a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.407398] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 957.407398] env[62914]: value = "task-4832234" [ 957.407398] env[62914]: _type = "Task" [ 957.407398] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.417041] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832234, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.477098] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.599110] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 957.616063] env[62914]: DEBUG oslo_concurrency.lockutils [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] Releasing lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.616225] env[62914]: DEBUG nova.compute.manager [req-79d443fc-7296-4bde-b9e7-496ec27ba1be req-fe0662bc-7068-4fa8-9fce-947d17431236 service nova] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Received event network-vif-deleted-cda3d580-68ab-4c88-9773-f8fb5b8394f2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 957.689562] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.690710] env[62914]: DEBUG nova.scheduler.client.report [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 957.778009] env[62914]: DEBUG nova.network.neutron [-] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.868017] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.917915] env[62914]: DEBUG oslo_vmware.api [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.450401} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.918227] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.918424] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 957.918611] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 957.919011] env[62914]: INFO nova.compute.manager [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: af541b15-19ce-415a-b03e-cb605b780247] Took 2.20 seconds to destroy the instance on the hypervisor. [ 957.919088] env[62914]: DEBUG oslo.service.loopingcall [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.919326] env[62914]: DEBUG nova.compute.manager [-] [instance: af541b15-19ce-415a-b03e-cb605b780247] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 957.919451] env[62914]: DEBUG nova.network.neutron [-] [instance: af541b15-19ce-415a-b03e-cb605b780247] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.125116] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.196525] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.734s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.199071] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.104s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.200612] env[62914]: INFO 
nova.compute.claims [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.236949] env[62914]: INFO nova.network.neutron [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating port c3221de3-00d5-45e7-af68-04297360fbcf with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 958.245465] env[62914]: DEBUG nova.compute.manager [req-b316d538-b132-4429-ade4-aae8af41412d req-77e199ee-a607-4fbb-9778-3a11a7f1ed72 service nova] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Received event network-vif-deleted-27d5fb42-82df-4642-9ddc-5a34ed445dfc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 958.247180] env[62914]: DEBUG nova.compute.manager [req-b316d538-b132-4429-ade4-aae8af41412d req-77e199ee-a607-4fbb-9778-3a11a7f1ed72 service nova] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Received event network-vif-deleted-7df887d7-caf7-4a91-b3f6-2476e768b7c2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 958.247180] env[62914]: DEBUG nova.compute.manager [req-b316d538-b132-4429-ade4-aae8af41412d req-77e199ee-a607-4fbb-9778-3a11a7f1ed72 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Received event network-vif-deleted-464c2387-9349-42aa-bab6-3d349ca0ed26 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 958.247180] env[62914]: INFO nova.compute.manager [req-b316d538-b132-4429-ade4-aae8af41412d req-77e199ee-a607-4fbb-9778-3a11a7f1ed72 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Neutron deleted interface 464c2387-9349-42aa-bab6-3d349ca0ed26; detaching it from the instance and deleting it from the info cache [ 958.247180] env[62914]: DEBUG nova.network.neutron [req-b316d538-b132-4429-ade4-aae8af41412d req-77e199ee-a607-4fbb-9778-3a11a7f1ed72 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.283706] env[62914]: INFO nova.compute.manager [-] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Took 1.33 seconds to deallocate network for instance. [ 958.366267] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832228, 'name': CreateVM_Task, 'duration_secs': 2.620662} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.366925] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 958.370147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.370147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.371405] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.371659] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50a7b979-f536-4488-9b14-8a2d8f330a03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.377541] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 958.377541] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f71fbe-6953-78ae-8292-67b4a3bb52c2" [ 958.377541] env[62914]: _type = "Task" [ 958.377541] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.387788] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f71fbe-6953-78ae-8292-67b4a3bb52c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.700349] env[62914]: DEBUG nova.network.neutron [-] [instance: af541b15-19ce-415a-b03e-cb605b780247] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.749364] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e719f7c-40d4-4cd1-94a6-b9d697bff2cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.763598] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff298491-4f6a-44ee-9db3-690710087cb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.800762] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.801160] env[62914]: DEBUG nova.compute.manager [req-b316d538-b132-4429-ade4-aae8af41412d req-77e199ee-a607-4fbb-9778-3a11a7f1ed72 service nova] [instance: af541b15-19ce-415a-b03e-cb605b780247] Detach interface failed, port_id=464c2387-9349-42aa-bab6-3d349ca0ed26, reason: Instance af541b15-19ce-415a-b03e-cb605b780247 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 958.888415] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f71fbe-6953-78ae-8292-67b4a3bb52c2, 'name': SearchDatastore_Task, 'duration_secs': 0.013101} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.888769] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.889021] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.889270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.889423] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.889606] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.889913] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2302a0fd-85fc-49aa-8aca-fcb109fd5b37 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.899192] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.899391] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 958.900133] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d3ede4-8c9d-4541-a9b1-56fae034ad5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.905753] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 958.905753] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52abfb94-2d23-f456-40c4-f4af12b591ab" [ 958.905753] env[62914]: _type = "Task" [ 958.905753] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.915031] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52abfb94-2d23-f456-40c4-f4af12b591ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.204029] env[62914]: INFO nova.compute.manager [-] [instance: af541b15-19ce-415a-b03e-cb605b780247] Took 1.28 seconds to deallocate network for instance. [ 959.421285] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52abfb94-2d23-f456-40c4-f4af12b591ab, 'name': SearchDatastore_Task, 'duration_secs': 0.015774} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.424534] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2c443ac-b08f-4bef-b56d-338a2c2357b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.429572] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 959.429572] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52aa111e-14bd-ac77-4a32-1d0fe3f80005" [ 959.429572] env[62914]: _type = "Task" [ 959.429572] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.442059] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52aa111e-14bd-ac77-4a32-1d0fe3f80005, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.646610] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b8fcc8-64c6-4e12-a3c3-032131fb8f5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.656036] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f4aa1a-46c9-4595-82be-984cbded9b85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.690899] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036fc932-88f0-4626-b893-f578fe6bd2ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.700478] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485a0034-6d1c-479d-9c5e-d1a7ee64ec0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.718455] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.719048] env[62914]: DEBUG nova.compute.provider_tree [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.901684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.901684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.902093] env[62914]: DEBUG nova.network.neutron [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 959.940809] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52aa111e-14bd-ac77-4a32-1d0fe3f80005, 'name': SearchDatastore_Task, 'duration_secs': 0.01617} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.941191] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.941469] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] d9476d24-fbc5-4e30-bf67-85c388e943fd/d9476d24-fbc5-4e30-bf67-85c388e943fd.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 959.941866] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9f05fcf-9f38-4f33-887b-d2dcb21a8a2d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.952061] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 959.952061] env[62914]: value = "task-4832235" [ 959.952061] env[62914]: _type = "Task" [ 959.952061] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.961515] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832235, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.222539] env[62914]: DEBUG nova.scheduler.client.report [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.272218] env[62914]: DEBUG nova.compute.manager [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-vif-plugged-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 960.272606] env[62914]: DEBUG oslo_concurrency.lockutils [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.272874] env[62914]: DEBUG oslo_concurrency.lockutils [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.273096] env[62914]: DEBUG oslo_concurrency.lockutils [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.273361] env[62914]: DEBUG nova.compute.manager [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] No waiting events found dispatching network-vif-plugged-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 960.273580] env[62914]: WARNING nova.compute.manager [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received unexpected event network-vif-plugged-c3221de3-00d5-45e7-af68-04297360fbcf for instance with vm_state shelved_offloaded and task_state spawning.
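The "Acquiring lock ... by ...", "acquired ... waited Ns" and ""released" ... held Ns" records above are produced by oslo.concurrency's lockutils wrappers around critical sections such as ResourceTracker.instance_claim and the per-instance "<uuid>-events" lock used when popping external events. A minimal sketch of that locking pattern follows; claim_resources and handle_event are illustrative names, not Nova code, and only lockutils.synchronized and lockutils.lock are real APIs.

    # Minimal sketch (assumed names, not Nova code) of the oslo.concurrency
    # locking pattern behind the "Acquiring lock ... by ..." records.
    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # Body runs while holding the in-process "compute_resources" lock;
        # lockutils logs how long the caller waited for it and held it.
        time.sleep(0.1)  # stand-in for resource-tracker bookkeeping
        return instance_uuid


    def handle_event(instance_uuid, event_name):
        # Explicit context-manager form, as used for the per-instance
        # "<uuid>-events" lock when popping external events.
        with lockutils.lock("%s-events" % instance_uuid):
            return event_name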
[ 960.274196] env[62914]: DEBUG nova.compute.manager [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 960.274196] env[62914]: DEBUG nova.compute.manager [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing instance network info cache due to event network-changed-c3221de3-00d5-45e7-af68-04297360fbcf. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 960.274196] env[62914]: DEBUG oslo_concurrency.lockutils [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] Acquiring lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.469084] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832235, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.712153] env[62914]: DEBUG nova.network.neutron [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.727593] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.728182] env[62914]: 
DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 960.731068] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.683s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.732848] env[62914]: INFO nova.compute.claims [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.965437] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832235, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714365} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.965733] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] d9476d24-fbc5-4e30-bf67-85c388e943fd/d9476d24-fbc5-4e30-bf67-85c388e943fd.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 960.965962] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.966257] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9db263b5-6b3a-4a8c-9326-6eca769161e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.973870] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 960.973870] env[62914]: value = "task-4832236" [ 960.973870] env[62914]: _type = "Task" [ 960.973870] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.982698] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832236, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.215621] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.218201] env[62914]: DEBUG oslo_concurrency.lockutils [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] Acquired lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.218461] env[62914]: DEBUG nova.network.neutron [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Refreshing network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 961.242065] env[62914]: DEBUG nova.compute.utils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.243292] env[62914]: DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 961.243663] env[62914]: DEBUG nova.network.neutron [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 961.249340] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='97afae5303cf9194208f323118d63274',container_format='bare',created_at=2025-11-25T11:27:48Z,direct_url=,disk_format='vmdk',id=2ebc6dd3-004e-4687-828d-5c558c734f0f,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1156296996-shelved',owner='adf406f1352240aba2338e64b8f182b4',properties=ImageMetaProps,protected=,size=31662592,status='active',tags=,updated_at=2025-11-25T11:28:11Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 961.249607] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor limits 0:0:0 
{{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 961.249786] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.249984] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 961.250158] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.250315] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 961.250529] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 961.250693] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 961.250867] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 961.251083] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 961.251284] env[62914]: DEBUG nova.virt.hardware [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 961.252183] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862dab9b-0a29-471c-9ba5-5721cb13d2ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.262548] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f64faa-2f1f-48e4-aeae-b7b0345911f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.279810] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:e7:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3221de3-00d5-45e7-af68-04297360fbcf', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 961.287700] env[62914]: DEBUG oslo.service.loopingcall [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.289082] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 961.289347] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2be90a95-95f5-4f63-b007-8f1df878c232 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.305262] env[62914]: DEBUG nova.policy [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6739a790d54c98b39ff51cf254379c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd141c01c1d5848eea6ef2b831e431ba5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 961.313488] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 961.313488] env[62914]: value = "task-4832237" [ 961.313488] env[62914]: _type = "Task" [ 961.313488] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.322913] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832237, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.487399] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094202} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.487399] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.487399] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f76cff-0ef5-4f96-bc8e-0d858b38e6fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.515211] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] d9476d24-fbc5-4e30-bf67-85c388e943fd/d9476d24-fbc5-4e30-bf67-85c388e943fd.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.515630] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3532e6ea-ae7f-470d-9e06-f3831328795a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.539242] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 961.539242] env[62914]: value = "task-4832238" [ 961.539242] env[62914]: _type = "Task" [ 961.539242] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.548200] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832238, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.615503] env[62914]: DEBUG nova.network.neutron [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Successfully created port: b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.746996] env[62914]: DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 961.827470] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832237, 'name': CreateVM_Task, 'duration_secs': 0.4406} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.830219] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 961.833302] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.833550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.833925] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 961.834236] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-202f59b2-824c-49b0-828f-31eeb788d014 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.840836] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 961.840836] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526de60b-9113-3c5f-8c66-703b6b1adef6" [ 961.840836] env[62914]: _type = "Task" [ 961.840836] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.854254] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526de60b-9113-3c5f-8c66-703b6b1adef6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.048789] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832238, 'name': ReconfigVM_Task, 'duration_secs': 0.386937} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.051677] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Reconfigured VM instance instance-00000055 to attach disk [datastore1] d9476d24-fbc5-4e30-bf67-85c388e943fd/d9476d24-fbc5-4e30-bf67-85c388e943fd.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.052706] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-660d1bdb-ec04-436e-9633-7f4d8167f990 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.069250] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 962.069250] env[62914]: value = "task-4832239" [ 962.069250] env[62914]: _type = "Task" [ 962.069250] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.080647] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832239, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.128820] env[62914]: DEBUG nova.network.neutron [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updated VIF entry in instance network info cache for port c3221de3-00d5-45e7-af68-04297360fbcf. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 962.132568] env[62914]: DEBUG nova.network.neutron [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [{"id": "c3221de3-00d5-45e7-af68-04297360fbcf", "address": "fa:16:3e:68:e7:1f", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3221de3-00", "ovs_interfaceid": "c3221de3-00d5-45e7-af68-04297360fbcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.257943] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c06847-a70f-4046-986b-6a57be626f30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.266352] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98111ae8-bbf4-4062-abbc-0e1772d4480d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.305254] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a505dfca-5645-44f9-8bc0-651123988d5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.314062] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf6b7ea-f9fb-4c10-931b-f8df331cd109 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.329950] env[62914]: DEBUG nova.compute.provider_tree [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.352483] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.352749] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Processing image 2ebc6dd3-004e-4687-828d-5c558c734f0f {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.353015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.353180] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.353365] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.353625] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e3e8c44-644d-45d8-922f-83028762bfc8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.362914] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.363151] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 962.363863] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a584a2ca-04c0-4f56-ae45-4db2cb8837dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.370166] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 962.370166] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527f3b3e-5a78-674e-9420-d0d2c4d23752" [ 962.370166] env[62914]: _type = "Task" [ 962.370166] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.379228] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527f3b3e-5a78-674e-9420-d0d2c4d23752, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.579591] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832239, 'name': Rename_Task, 'duration_secs': 0.237565} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.579977] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 962.580212] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c102432-bcd1-4403-8ee5-11343909f1e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.587617] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 962.587617] env[62914]: value = "task-4832240" [ 962.587617] env[62914]: _type = "Task" [ 962.587617] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.596298] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832240, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.638846] env[62914]: DEBUG oslo_concurrency.lockutils [req-a817cfab-9245-4934-9222-66ae5565ed5e req-ba996de9-8978-4469-81b1-92150f3ef6d5 service nova] Releasing lock "refresh_cache-dc99b470-4334-408d-8853-d2e9b9204d04" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.762870] env[62914]: DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 962.789627] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.789889] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.790066] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None 
req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.792845] env[62914]: DEBUG nova.virt.hardware [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.792845] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e520410-a953-425c-9b68-258a93ed1b5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.800885] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fb2583-36bf-4a9c-973e-77274d9b84c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.833121] env[62914]: DEBUG nova.scheduler.client.report [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.881153] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 962.881493] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Fetch image to [datastore1] OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42/OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 962.881723] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Downloading stream optimized image 2ebc6dd3-004e-4687-828d-5c558c734f0f to [datastore1] 
OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42/OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42.vmdk on the data store datastore1 as vApp {{(pid=62914) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 962.881925] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Downloading image file data 2ebc6dd3-004e-4687-828d-5c558c734f0f to the ESX as VM named 'OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42' {{(pid=62914) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 962.959882] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 962.959882] env[62914]: value = "resgroup-9" [ 962.959882] env[62914]: _type = "ResourcePool" [ 962.959882] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 962.960716] env[62914]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e1a67c12-86a8-4aa7-8435-8846e186a376 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.983882] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease: (returnval){ [ 962.983882] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259b71d-f439-f6ea-5c4f-27f8d68eba88" [ 962.983882] env[62914]: _type = "HttpNfcLease" [ 962.983882] env[62914]: } obtained for vApp import into resource pool (val){ [ 962.983882] env[62914]: value = "resgroup-9" [ 962.983882] env[62914]: _type = "ResourcePool" [ 962.983882] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 962.984278] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the lease: (returnval){ [ 962.984278] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259b71d-f439-f6ea-5c4f-27f8d68eba88" [ 962.984278] env[62914]: _type = "HttpNfcLease" [ 962.984278] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 962.992525] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.992525] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259b71d-f439-f6ea-5c4f-27f8d68eba88" [ 962.992525] env[62914]: _type = "HttpNfcLease" [ 962.992525] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 963.100907] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832240, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.210272] env[62914]: DEBUG nova.compute.manager [req-95e65eee-4d77-41e8-a338-a6ca034ea214 req-3d6da1a8-31e6-48e9-bb62-edffdd258f0d service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Received event network-vif-plugged-b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 963.210272] env[62914]: DEBUG oslo_concurrency.lockutils [req-95e65eee-4d77-41e8-a338-a6ca034ea214 req-3d6da1a8-31e6-48e9-bb62-edffdd258f0d service nova] Acquiring lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.210272] env[62914]: DEBUG oslo_concurrency.lockutils [req-95e65eee-4d77-41e8-a338-a6ca034ea214 req-3d6da1a8-31e6-48e9-bb62-edffdd258f0d service nova] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.210821] env[62914]: DEBUG oslo_concurrency.lockutils [req-95e65eee-4d77-41e8-a338-a6ca034ea214 req-3d6da1a8-31e6-48e9-bb62-edffdd258f0d service nova] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.211197] env[62914]: DEBUG nova.compute.manager [req-95e65eee-4d77-41e8-a338-a6ca034ea214 req-3d6da1a8-31e6-48e9-bb62-edffdd258f0d service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] No waiting events found dispatching network-vif-plugged-b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 963.211522] env[62914]: WARNING nova.compute.manager [req-95e65eee-4d77-41e8-a338-a6ca034ea214 req-3d6da1a8-31e6-48e9-bb62-edffdd258f0d service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Received unexpected event network-vif-plugged-b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 for instance with vm_state building and task_state spawning. [ 963.260535] env[62914]: DEBUG nova.network.neutron [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Successfully updated port: b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 963.338690] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.339121] env[62914]: DEBUG nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 963.342410] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.191s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.342602] env[62914]: DEBUG nova.objects.instance [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 963.493552] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 963.493552] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259b71d-f439-f6ea-5c4f-27f8d68eba88" [ 963.493552] env[62914]: _type = "HttpNfcLease" [ 963.493552] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 963.598946] env[62914]: DEBUG oslo_vmware.api [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832240, 'name': PowerOnVM_Task, 'duration_secs': 0.548433} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.599342] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 963.599525] env[62914]: INFO nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Took 11.52 seconds to spawn the instance on the hypervisor. 
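
Note on the task records above: the PowerOnVM_Task entries ("progress is 100%", then "completed successfully" with a duration_secs value) are emitted by oslo.vmware's task polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). The snippet below is only a minimal, self-contained sketch of that poll-until-done pattern, under the assumption of a hypothetical fetch_task_info() callable; it is not oslo.vmware's implementation, and real callers should simply use VMwareAPISession.wait_for_task().

    # Illustrative sketch only: the poll-until-done loop behind the
    # "Task: {...} progress is N%" / "completed successfully" records.
    # fetch_task_info() is a hypothetical stand-in for a vCenter query.
    import time

    POLL_INTERVAL = 0.5  # seconds between progress checks


    def wait_for_task(fetch_task_info, timeout=300.0):
        """Poll a task until it succeeds, fails, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 42}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(POLL_INTERVAL)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

The duration_secs value logged on completion corresponds to the total time spent in such a loop.
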
[ 963.599712] env[62914]: DEBUG nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 963.600573] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae267a9-1dc6-4711-ab19-e5c430da9fb9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.764574] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "refresh_cache-dac99ed2-aed9-4c3e-bcab-a8de9967990c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.764732] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "refresh_cache-dac99ed2-aed9-4c3e-bcab-a8de9967990c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.764899] env[62914]: DEBUG nova.network.neutron [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 963.847591] env[62914]: DEBUG nova.compute.utils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.853990] env[62914]: DEBUG nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Not allocating networking since 'none' was specified. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 963.994146] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 963.994146] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259b71d-f439-f6ea-5c4f-27f8d68eba88" [ 963.994146] env[62914]: _type = "HttpNfcLease" [ 963.994146] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 963.994472] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 963.994472] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259b71d-f439-f6ea-5c4f-27f8d68eba88" [ 963.994472] env[62914]: _type = "HttpNfcLease" [ 963.994472] env[62914]: }. 
{{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 963.995261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6147f4a0-372a-432d-9fd7-e9b3db165ef5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.004957] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528dd845-c878-391d-92cf-270b56ce4f40/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 964.005137] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating HTTP connection to write to file with size = 31662592 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528dd845-c878-391d-92cf-270b56ce4f40/disk-0.vmdk. {{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 964.068619] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-04250fdd-67e3-4ee8-acc7-2cafba788e16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.120031] env[62914]: INFO nova.compute.manager [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Took 41.20 seconds to build instance. [ 964.308272] env[62914]: DEBUG nova.network.neutron [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 964.345901] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] Acquiring lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.346114] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] Acquired lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.346299] env[62914]: DEBUG nova.network.neutron [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 964.354314] env[62914]: DEBUG nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 964.358959] env[62914]: DEBUG oslo_concurrency.lockutils [None req-60196b4b-4a03-4038-8214-ad6384107502 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.363138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.854s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.363410] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.363567] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 964.363919] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.790s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.365484] env[62914]: INFO nova.compute.claims [None 
req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.370076] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb23b48-cfd0-47a4-83f7-887b41a58123 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.380729] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b480018-e348-4fb0-8ccc-f558cc54a2a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.398510] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5fd05b-a4b9-411c-977e-b7a9fefda543 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.405473] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fdd979-626f-4f00-a5a0-933d59ee4d2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.437621] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178125MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 964.437783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.520620] env[62914]: DEBUG nova.network.neutron [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Updating instance_info_cache with network_info: [{"id": "b2fe080d-6273-4a2c-b4dc-2d9ec37d4161", "address": "fa:16:3e:ef:c1:3f", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2fe080d-62", "ovs_interfaceid": "b2fe080d-6273-4a2c-b4dc-2d9ec37d4161", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.622315] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fdc4cac1-6467-4000-bea7-250ec5364fe6 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.716s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.024541] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "refresh_cache-dac99ed2-aed9-4c3e-bcab-a8de9967990c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.024541] env[62914]: DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance network_info: |[{"id": "b2fe080d-6273-4a2c-b4dc-2d9ec37d4161", "address": "fa:16:3e:ef:c1:3f", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2fe080d-62", "ovs_interfaceid": "b2fe080d-6273-4a2c-b4dc-2d9ec37d4161", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 965.028545] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:c1:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2fe080d-6273-4a2c-b4dc-2d9ec37d4161', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.040123] env[62914]: DEBUG oslo.service.loopingcall [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.040123] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 965.040123] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa62ec71-7497-4622-b99c-09bda6c8aeab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.068028] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.068028] env[62914]: value = "task-4832242" [ 965.068028] env[62914]: _type = "Task" [ 965.068028] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.082087] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832242, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.150527] env[62914]: DEBUG nova.network.neutron [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Updating instance_info_cache with network_info: [{"id": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "address": "fa:16:3e:9d:6c:cb", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8963aef9-17", "ovs_interfaceid": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.362471] env[62914]: DEBUG nova.compute.manager [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Received event network-changed-b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 965.362725] env[62914]: DEBUG nova.compute.manager [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Refreshing instance network info cache due to event network-changed-b2fe080d-6273-4a2c-b4dc-2d9ec37d4161. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 965.363018] env[62914]: DEBUG oslo_concurrency.lockutils [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] Acquiring lock "refresh_cache-dac99ed2-aed9-4c3e-bcab-a8de9967990c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.363277] env[62914]: DEBUG oslo_concurrency.lockutils [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] Acquired lock "refresh_cache-dac99ed2-aed9-4c3e-bcab-a8de9967990c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.363522] env[62914]: DEBUG nova.network.neutron [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Refreshing network info cache for port b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 965.389397] env[62914]: DEBUG nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 965.425193] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.425538] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.425769] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.426039] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.426269] env[62914]: DEBUG nova.virt.hardware 
[None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.426484] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.426764] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.427017] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.427284] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.427515] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.427764] env[62914]: DEBUG nova.virt.hardware [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.428721] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491bd39a-abee-4ae4-b200-f964f569311a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.441388] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4871dc8b-f681-4d7e-aea5-744c021cc2f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.469033] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.479319] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Creating folder: 
Project (68b1e0389ce24e5ca474fd7882a59df7). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.490424] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45d9e553-8635-4488-9fe8-d4904441c5c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.512780] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Created folder: Project (68b1e0389ce24e5ca474fd7882a59df7) in parent group-v941773. [ 965.513200] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Creating folder: Instances. Parent ref: group-v942007. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.517600] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3db0acf0-6fef-4a67-bb61-63020b6cc69b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.531417] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Created folder: Instances in parent group-v942007. [ 965.531773] env[62914]: DEBUG oslo.service.loopingcall [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.537419] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 965.537419] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-000cacbf-74da-4bf5-91ce-65e1a34b6f2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.557152] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.557152] env[62914]: value = "task-4832245" [ 965.557152] env[62914]: _type = "Task" [ 965.557152] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.565793] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832245, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.579297] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832242, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.658594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] Releasing lock "refresh_cache-d9476d24-fbc5-4e30-bf67-85c388e943fd" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.659171] env[62914]: DEBUG nova.compute.manager [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Inject network info {{(pid=62914) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7264}} [ 965.659510] env[62914]: DEBUG nova.compute.manager [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] network_info to inject: |[{"id": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "address": "fa:16:3e:9d:6c:cb", "network": {"id": "78973726-60cf-4f6d-8aae-4ea7c2d4ffa3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-564935929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5adc4dc554ed4fe69f214161fd8ab9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8963aef9-17", "ovs_interfaceid": "8963aef9-1731-4bd4-b659-83eb9724f8f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 965.665174] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Reconfiguring VM instance to set the machine id {{(pid=62914) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 965.670983] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1acd8d2c-23e3-464c-b248-2fe12402d5d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.692347] env[62914]: DEBUG oslo_vmware.api [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] Waiting for the task: (returnval){ [ 965.692347] env[62914]: value = "task-4832246" [ 965.692347] env[62914]: _type = "Task" [ 965.692347] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.707540] env[62914]: DEBUG oslo_vmware.api [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] Task: {'id': task-4832246, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.879389] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Completed reading data from the image iterator. {{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 965.879730] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528dd845-c878-391d-92cf-270b56ce4f40/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 965.881281] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6328c7-f025-4cf8-a305-b37c95e1618f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.892751] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528dd845-c878-391d-92cf-270b56ce4f40/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 965.892999] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528dd845-c878-391d-92cf-270b56ce4f40/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 965.893278] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-55ed6aba-2540-4f24-a26f-7b5a6f6c25c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.944032] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd805194-fe79-48cb-b74b-f19cc3acd04b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.952288] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcf6136-9d28-4669-9d10-80b4baf7d103 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.987198] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6252df-843c-45f4-883b-d8a51390fe06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.996831] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51815fc-9720-4d11-9a05-8b9a5f73e702 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.012691] env[62914]: DEBUG nova.compute.provider_tree [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.069085] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832245, 'name': CreateVM_Task, 'duration_secs': 0.393202} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.069333] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 966.069869] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.070062] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.070420] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 966.070745] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a57e3b58-b62c-4eda-b412-2d25ae469e35 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.080607] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 966.080607] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5260e9c9-79c8-8790-def9-e30933055610" [ 966.080607] env[62914]: _type = "Task" [ 966.080607] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.084398] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832242, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.094379] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5260e9c9-79c8-8790-def9-e30933055610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.155647] env[62914]: DEBUG oslo_vmware.rw_handles [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528dd845-c878-391d-92cf-270b56ce4f40/disk-0.vmdk. 
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 966.155922] env[62914]: INFO nova.virt.vmwareapi.images [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Downloaded image file data 2ebc6dd3-004e-4687-828d-5c558c734f0f [ 966.157035] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c32a6a-9797-418a-837b-c1a40e7ae641 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.176682] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d77efaca-448f-40f9-b8a9-a0c4d5a780de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.202639] env[62914]: DEBUG oslo_vmware.api [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] Task: {'id': task-4832246, 'name': ReconfigVM_Task, 'duration_secs': 0.257845} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.205230] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-22f8acaf-ebdf-437b-bead-9a1aae1e3fff tempest-ServersAdminTestJSON-1197653150 tempest-ServersAdminTestJSON-1197653150-project-admin] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Reconfigured VM instance to set the machine id {{(pid=62914) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 966.326017] env[62914]: INFO nova.virt.vmwareapi.images [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] The imported VM was unregistered [ 966.329128] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 966.329401] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating directory with path [datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.329852] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac20f18f-1ad3-42e5-8fff-21b864be4cf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.347083] env[62914]: DEBUG nova.network.neutron [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Updated VIF entry in instance network info cache for port b2fe080d-6273-4a2c-b4dc-2d9ec37d4161. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 966.347467] env[62914]: DEBUG nova.network.neutron [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Updating instance_info_cache with network_info: [{"id": "b2fe080d-6273-4a2c-b4dc-2d9ec37d4161", "address": "fa:16:3e:ef:c1:3f", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2fe080d-62", "ovs_interfaceid": "b2fe080d-6273-4a2c-b4dc-2d9ec37d4161", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.358399] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created directory with path [datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.358643] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42/OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42.vmdk to [datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk. {{(pid=62914) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 966.359689] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c7d2336c-b645-46e9-a766-7fa6c36677d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.368959] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 966.368959] env[62914]: value = "task-4832248" [ 966.368959] env[62914]: _type = "Task" [ 966.368959] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.378050] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.516335] env[62914]: DEBUG nova.scheduler.client.report [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.582787] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832242, 'name': CreateVM_Task, 'duration_secs': 1.479962} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.582787] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 966.583470] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.594542] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5260e9c9-79c8-8790-def9-e30933055610, 'name': SearchDatastore_Task, 'duration_secs': 0.015116} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.594878] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.595140] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.595399] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.595550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.595734] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.596057] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.596829] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 966.596829] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52cdd6df-b75b-48f9-9926-4d6516ca4016 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.601020] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90deef09-bd76-4833-a947-f0f9759e79ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.605279] env[62914]: DEBUG 
oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 966.605279] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f52b9b-8de9-afb5-361c-94baa47eed4f" [ 966.605279] env[62914]: _type = "Task" [ 966.605279] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.610702] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.611042] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 966.612293] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0ddaf6-53fc-48de-853d-517a3fe8a118 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.619873] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f52b9b-8de9-afb5-361c-94baa47eed4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.623785] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 966.623785] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b919ec-7ebd-a47e-7e9b-df9d664b1877" [ 966.623785] env[62914]: _type = "Task" [ 966.623785] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.633984] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b919ec-7ebd-a47e-7e9b-df9d664b1877, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.850868] env[62914]: DEBUG oslo_concurrency.lockutils [req-f5302183-0f07-4d3c-acf8-aded484a74e0 req-0b680d9e-8359-48b0-9f68-c9c897b95726 service nova] Releasing lock "refresh_cache-dac99ed2-aed9-4c3e-bcab-a8de9967990c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.879969] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.023855] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.024990] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 967.028134] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.720s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.028380] env[62914]: DEBUG nova.objects.instance [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lazy-loading 'resources' on Instance uuid cca4bbf9-8864-4805-b95e-954e6b570eae {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.119596] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f52b9b-8de9-afb5-361c-94baa47eed4f, 'name': SearchDatastore_Task, 'duration_secs': 0.04691} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.119947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.120224] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 967.120473] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.138016] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b919ec-7ebd-a47e-7e9b-df9d664b1877, 'name': SearchDatastore_Task, 'duration_secs': 0.036409} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.138913] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efea1526-a192-4ec4-b070-fead3cbd9636 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.147849] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 967.147849] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e886b5-93ce-cfae-2ad8-d65748c3d75d" [ 967.147849] env[62914]: _type = "Task" [ 967.147849] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.160169] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e886b5-93ce-cfae-2ad8-d65748c3d75d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.287837] env[62914]: INFO nova.compute.manager [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Rebuilding instance [ 967.348060] env[62914]: DEBUG nova.compute.manager [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 967.349696] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eddcc81-ecdb-4f83-8b33-4c6e3e84687c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.383846] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.536101] env[62914]: DEBUG nova.compute.utils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 967.538752] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 967.539079] env[62914]: DEBUG nova.network.neutron [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 967.591565] env[62914]: DEBUG nova.policy [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 967.663486] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e886b5-93ce-cfae-2ad8-d65748c3d75d, 'name': SearchDatastore_Task, 'duration_secs': 0.082426} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.667037] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.667418] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] b443050b-78ae-4f9d-81d4-508f5cf4a322/b443050b-78ae-4f9d-81d4-508f5cf4a322.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 967.668077] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.668336] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 967.668626] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7c1293e-d77f-4e94-93d2-6b11059d4822 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.671110] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af066747-9429-4725-bc71-db0cb7cc8ecf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.686696] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 967.686696] env[62914]: value = "task-4832249" [ 967.686696] env[62914]: _type = "Task" [ 967.686696] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.691894] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 967.692190] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 967.696411] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-996f06cb-c3df-4fb0-9b84-9a975dbef557 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.699730] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.709332] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 967.709332] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52246949-5afc-034f-7f43-24d43d4d067a" [ 967.709332] env[62914]: _type = "Task" [ 967.709332] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.720080] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52246949-5afc-034f-7f43-24d43d4d067a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.867056] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 967.867452] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4753ee6d-b512-42f8-a7d2-9212a17da854 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.890569] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.893432] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 967.893432] env[62914]: value = "task-4832250" [ 967.893432] env[62914]: _type = "Task" [ 967.893432] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.915426] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832250, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.021520] env[62914]: DEBUG nova.network.neutron [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Successfully created port: de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.043767] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 968.086915] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49dccfd-943b-4ed9-921a-3c89c716274a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.098621] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2f218b-043b-4696-8161-d85fc87ebf13 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.139841] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dcabae-da06-47a7-b061-66a8f96d7ce8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.152224] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8d81bd-3aa6-4eed-ae2c-8f3290dbb97d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.173059] env[62914]: DEBUG nova.compute.provider_tree [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.200335] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.223664] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52246949-5afc-034f-7f43-24d43d4d067a, 'name': SearchDatastore_Task, 'duration_secs': 0.078137} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.224586] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a585eac9-ca93-4b9d-b8a0-f077db3f94d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.234189] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 968.234189] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f97d9d-6427-85ef-3072-fde4758ebf48" [ 968.234189] env[62914]: _type = "Task" [ 968.234189] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.245986] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f97d9d-6427-85ef-3072-fde4758ebf48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.384671] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.406275] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832250, 'name': PowerOffVM_Task, 'duration_secs': 0.511603} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.406613] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 968.406841] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 968.407765] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77148698-eda9-4662-aff5-d486285cd1f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.418082] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 968.418463] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-137638ba-5672-4804-812d-524719b4063c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.676892] env[62914]: DEBUG nova.scheduler.client.report [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.699914] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.746062] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f97d9d-6427-85ef-3072-fde4758ebf48, 'name': SearchDatastore_Task, 'duration_secs': 0.094126} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.746062] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.746062] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 968.746459] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c064b9cf-4569-4267-a587-ab5e8dcbbae1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.755659] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 968.755659] env[62914]: value = "task-4832252" [ 968.755659] env[62914]: _type = "Task" [ 968.755659] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.764800] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.882352] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.029167] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 969.029855] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 969.030283] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleting the datastore file [datastore1] 10102941-c31a-4ab1-be5a-801520d49fd7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.030767] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3da963fa-ff1d-4c88-aa04-b5c5b666e32d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.043437] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 969.043437] env[62914]: value = "task-4832253" [ 969.043437] env[62914]: _type = "Task" [ 969.043437] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.054916] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 969.060821] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832253, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.085376] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 969.085626] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 969.085792] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.085979] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 969.086149] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.086307] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 969.086523] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 969.086691] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 969.086872] env[62914]: 
DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 969.087044] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 969.087232] env[62914]: DEBUG nova.virt.hardware [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 969.088212] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576c1b1c-7745-4446-a04d-8b2cdd90ddc8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.100100] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96743a0f-fb78-45e4-91d7-e461df75831d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.222522] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.222522] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.889s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.222522] env[62914]: DEBUG nova.objects.instance [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lazy-loading 'resources' on Instance uuid 769c3873-7480-47de-894b-40dbf3f2f7f0 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.222522] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.228142] env[62914]: INFO nova.scheduler.client.report [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Deleted allocations for instance cca4bbf9-8864-4805-b95e-954e6b570eae [ 969.270025] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.382437] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.493174] env[62914]: DEBUG nova.compute.manager [req-513d841a-c1fd-4b62-b7e3-0de6eb227646 req-0d676a4e-9852-4334-9bb6-af66c2ccc3e0 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-vif-plugged-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 969.493573] env[62914]: DEBUG oslo_concurrency.lockutils [req-513d841a-c1fd-4b62-b7e3-0de6eb227646 req-0d676a4e-9852-4334-9bb6-af66c2ccc3e0 service nova] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.494849] env[62914]: DEBUG oslo_concurrency.lockutils [req-513d841a-c1fd-4b62-b7e3-0de6eb227646 req-0d676a4e-9852-4334-9bb6-af66c2ccc3e0 service nova] Lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.494849] env[62914]: DEBUG oslo_concurrency.lockutils [req-513d841a-c1fd-4b62-b7e3-0de6eb227646 req-0d676a4e-9852-4334-9bb6-af66c2ccc3e0 service nova] Lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.494849] env[62914]: DEBUG nova.compute.manager [req-513d841a-c1fd-4b62-b7e3-0de6eb227646 req-0d676a4e-9852-4334-9bb6-af66c2ccc3e0 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] No waiting events found dispatching network-vif-plugged-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 969.495040] env[62914]: WARNING nova.compute.manager [req-513d841a-c1fd-4b62-b7e3-0de6eb227646 req-0d676a4e-9852-4334-9bb6-af66c2ccc3e0 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received unexpected event network-vif-plugged-de62c681-4ead-4636-8a49-3bcab66952b9 for instance with vm_state building and task_state spawning. 
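[editor's note] The lockutils entries above ("compute_resources" acquired/released, the per-instance "<uuid>-events" lock around _pop_event) all follow the same oslo.concurrency named-lock pattern. A minimal sketch of that pattern is below; the lock names and the guarded work are illustrative placeholders, only the decorator/context-manager usage reflects the real library API.

```python
# Sketch of the named-lock pattern that produces the
# "acquired ... waited Ns" / "released ... held Ns" log lines above.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def update_usage_example(instance_uuid):
    # Runs with the "compute_resources" lock held, like
    # ResourceTracker.update_usage in the log.
    print(f"updating resource usage for {instance_uuid}")


def pop_instance_event_example(instance_uuid):
    # The per-instance event lock ("<uuid>-events") expressed with the
    # context-manager form of the same API.
    with lockutils.lock(f"{instance_uuid}-events"):
        print("dispatching queued external events")
```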
[ 969.555324] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.590926] env[62914]: DEBUG nova.network.neutron [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Successfully updated port: de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 969.700478] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.736849] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cf1e9c81-a644-425c-ab4e-89b44ab82c19 tempest-AttachInterfacesUnderV243Test-1577368867 tempest-AttachInterfacesUnderV243Test-1577368867-project-member] Lock "cca4bbf9-8864-4805-b95e-954e6b570eae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.425s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.768794] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.889871] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832248, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.313344} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.890379] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42/OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42.vmdk to [datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk. 
[ 969.890996] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Cleaning up location [datastore1] OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 969.891237] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_179a1c06-b10f-409c-a395-c4cf1bb57a42 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.894600] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d7f1c89-0c2d-459d-88e0-f2cf33de5e7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.903978] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 969.903978] env[62914]: value = "task-4832254" [ 969.903978] env[62914]: _type = "Task" [ 969.903978] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.918746] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832254, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.062463] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832253, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.097107] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.097390] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.097475] env[62914]: DEBUG nova.network.neutron [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.128098] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64436dd-09d9-4598-8e19-9f91da6a366c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.140011] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989ec76a-c885-4b21-9ec8-dffdbac47a44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.178177] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9efdbab-5532-4289-ab72-adb149ca87f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.190315] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921568e6-95e9-4edb-b4c4-985ce5b9b5bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.204967] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.214528] env[62914]: DEBUG nova.compute.provider_tree [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.270855] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.417237] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832254, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26656} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.417541] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.417718] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.417971] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk to [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 970.418290] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86c95ecb-710a-4e67-92de-bb9391dd1e1a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.426995] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 970.426995] env[62914]: value = "task-4832255" [ 970.426995] env[62914]: _type = "Task" [ 970.426995] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.436945] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.556067] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.066921} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.556357] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.556546] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 970.556785] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 970.631819] env[62914]: DEBUG nova.network.neutron [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 970.707871] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832249, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.892647} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.709452] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] b443050b-78ae-4f9d-81d4-508f5cf4a322/b443050b-78ae-4f9d-81d4-508f5cf4a322.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 970.709940] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 970.710507] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4b44d5a-27e1-445f-b6de-98eeca932840 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.717292] env[62914]: DEBUG nova.scheduler.client.report [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.724976] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 970.724976] env[62914]: value = "task-4832256" [ 970.724976] env[62914]: _type = "Task" [ 970.724976] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.740611] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832256, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.767762] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.825776] env[62914]: DEBUG nova.network.neutron [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.939671] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 0%. 
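The `update_instance_cache_with_nw_info` entry above carries the full network_info blob for port de62c681-4ead-4636-8a49-3bcab66952b9. When tracing such entries, only a handful of fields usually matter; the sketch below pulls them out of an abbreviated copy of the structure exactly as it appears in the log (plain dict handling, no Nova code involved).

```python
# Abbreviated copy of the network_info entry logged above; only the
# fields used below are kept, values are taken from the log.
network_info = [{
    "id": "de62c681-4ead-4636-8a49-3bcab66952b9",
    "address": "fa:16:3e:3d:2c:1a",
    "network": {
        "id": "9be47f79-b984-4fc2-a590-a80f36132ab1",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.10", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "details": {"segmentation_id": 126},
    "devname": "tapde62c681-4e",
}]

# Flatten the pieces most often needed when tracing a port problem:
# port id, MAC, fixed IPs, segmentation id and tap device name.
for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], fixed_ips,
          vif["details"]["segmentation_id"], vif["devname"])
```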
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.223720] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.226954] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.222s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.228040] env[62914]: INFO nova.compute.claims [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.246996] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.228593} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.246996] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.247432] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ef0fee-0cd8-4fd9-84e0-ba6a3985c9d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.269950] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] b443050b-78ae-4f9d-81d4-508f5cf4a322/b443050b-78ae-4f9d-81d4-508f5cf4a322.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.272236] env[62914]: INFO nova.scheduler.client.report [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleted allocations for instance 769c3873-7480-47de-894b-40dbf3f2f7f0 [ 971.279498] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-844bcee5-55b4-4f16-beb4-5c487a677f38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.302229] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.303887] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 971.303887] env[62914]: value = "task-4832257" [ 971.303887] env[62914]: _type = "Task" [ 971.303887] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.317763] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832257, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.329639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.329978] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Instance network_info: |[{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 971.330450] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:2c:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de62c681-4ead-4636-8a49-3bcab66952b9', 
'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.338166] env[62914]: DEBUG oslo.service.loopingcall [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.338799] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 971.339077] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dc14cfa-3ac7-4afc-8407-54dd3ebaebf8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.360596] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.360596] env[62914]: value = "task-4832258" [ 971.360596] env[62914]: _type = "Task" [ 971.360596] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.369374] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832258, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.438542] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.542461] env[62914]: DEBUG nova.compute.manager [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 971.542626] env[62914]: DEBUG nova.compute.manager [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing instance network info cache due to event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9. 
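The external-event entries above, like the earlier "Acquiring/Acquired/Releasing lock refresh_cache-fa33e1a5-..." entries, rely on oslo.concurrency's named-lock pattern: both the build path and the network-changed event handler serialize on the same "refresh_cache-<instance uuid>" lock name before touching the instance's network info cache. A minimal sketch of that pattern, assuming only oslo.concurrency; `refresh_network_cache` is a hypothetical helper, not Nova's API.

```python
# Minimal sketch of the named-lock pattern behind the
# "Acquiring/Acquired/Releasing lock refresh_cache-<uuid>" entries.
# `refresh_network_cache` and `fetch_nw_info` are illustrative only.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, fetch_nw_info):
    # Every writer of this instance's network info cache takes the same
    # named lock, so a concurrent build and an external event handler
    # cannot rebuild the cache at the same time.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        nw_info = fetch_nw_info(instance_uuid)
        # ... persist nw_info to the instance info cache here ...
        return nw_info
```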
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 971.542864] env[62914]: DEBUG oslo_concurrency.lockutils [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.543046] env[62914]: DEBUG oslo_concurrency.lockutils [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.545060] env[62914]: DEBUG nova.network.neutron [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 971.602528] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.602782] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.602948] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.603152] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.603310] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.603559] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 
tempest-ServersAdminTestJSON-1709758822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.603782] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 971.603947] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 971.604146] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.604320] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.604497] env[62914]: DEBUG nova.virt.hardware [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.605418] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b63a93-69e2-4c74-9874-8b6529e59c01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.615510] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979483dd-6591-41fe-be64-f6ae7ee77c9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.631880] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:a1:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.640614] env[62914]: DEBUG oslo.service.loopingcall [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.640960] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 971.641263] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-740fb76c-fcbd-4f43-957d-4f43bf851523 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.664036] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.664036] env[62914]: value = "task-4832259" [ 971.664036] env[62914]: _type = "Task" [ 971.664036] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.673301] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832259, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.773044] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832252, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.825214} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.773353] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 971.773584] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.773908] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0f24cda-7a97-4d52-bd96-08ef30ae869b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.786461] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5c1c548a-d8ed-49ab-bdd2-3db09cf1bc56 tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "769c3873-7480-47de-894b-40dbf3f2f7f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.440s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.790791] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 971.790791] env[62914]: value = "task-4832260" [ 971.790791] env[62914]: _type = "Task" [ 971.790791] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.801730] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832260, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.816578] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832257, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.875519] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832258, 'name': CreateVM_Task, 'duration_secs': 0.474778} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.875519] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 971.875519] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.875519] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.875519] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.875519] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9be3e3af-139c-4046-9c2e-ba8ad887bd0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.881651] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 971.881651] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f221c2-4e26-6a1b-7076-f261cce90d8b" [ 971.881651] env[62914]: _type = "Task" [ 971.881651] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.894952] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f221c2-4e26-6a1b-7076-f261cce90d8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.940268] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.181495] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832259, 'name': CreateVM_Task, 'duration_secs': 0.44596} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.181495] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 972.181724] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.182067] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.182832] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 972.187200] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59982b46-dd4b-487d-8e17-9d223259f67a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.195018] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 972.195018] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bba34-2f4f-a676-3a1f-a64655088b85" [ 972.195018] env[62914]: _type = "Task" [ 972.195018] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.209279] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bba34-2f4f-a676-3a1f-a64655088b85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.305830] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832260, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.265932} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.313394] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.319804] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3432626b-a8f0-48fa-805a-ff1dc63c568c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.333573] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832257, 'name': ReconfigVM_Task, 'duration_secs': 0.5671} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.345636] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Reconfigured VM instance instance-00000057 to attach disk [datastore1] b443050b-78ae-4f9d-81d4-508f5cf4a322/b443050b-78ae-4f9d-81d4-508f5cf4a322.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.357186] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.358467] env[62914]: DEBUG nova.network.neutron [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updated VIF entry in instance network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 972.358923] env[62914]: DEBUG nova.network.neutron [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.363412] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5452577d-2b5e-429e-80ea-a17a7fa88990 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.365273] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5dab81c-1609-47a0-8201-190285861917 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.397025] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 972.397025] env[62914]: value = "task-4832261" [ 972.397025] env[62914]: _type = "Task" [ 972.397025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.397791] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 972.397791] env[62914]: value = "task-4832262" [ 972.397791] env[62914]: _type = "Task" [ 972.397791] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.415938] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f221c2-4e26-6a1b-7076-f261cce90d8b, 'name': SearchDatastore_Task, 'duration_secs': 0.0836} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.417228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.417557] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.417871] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.418052] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.418254] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.421590] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3e2e201-15e9-4361-9ef4-906cd5340403 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.434378] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832261, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.434706] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832262, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.452148] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.454780] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.455025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 972.458902] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5063c4a-2a18-4b46-a966-e1d2288117d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.468466] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 972.468466] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52756db1-255c-13d4-c276-93f9224bc052" [ 972.468466] env[62914]: _type = "Task" [ 972.468466] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.481624] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52756db1-255c-13d4-c276-93f9224bc052, 'name': SearchDatastore_Task} progress is 0%. 
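The image-cache entries above ("Processing image 75c43660-...", the locks on "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk", and the MakeDirectory/SearchDatastore_Task calls) all hinge on a common datastore path layout: the cached image lives in a per-image folder under devstack-image-cache_base, and the resulting .vmdk path doubles as the lock name. A small sketch of how those strings are composed (string layout only; the helper is illustrative, not Nova's code).

```python
# How the datastore paths and lock names seen above are composed.
# `cached_image_paths` is an illustrative helper, not Nova's API.
IMAGE_CACHE_FOLDER = 'devstack-image-cache_base'


def cached_image_paths(datastore, image_id):
    folder = '[%s] %s' % (datastore, IMAGE_CACHE_FOLDER)
    vmdk = '%s/%s/%s.vmdk' % (folder, image_id, image_id)
    # The vmdk path is also used as the oslo.concurrency lock name, which
    # is why the log shows locks such as
    # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk".
    return folder, vmdk


print(cached_image_paths('datastore1',
                         '75c43660-b52b-450e-ba36-0f721e14bc6c'))
```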
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.537968] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "bdec185e-2af7-4379-8c67-03e125750bb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.538345] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "bdec185e-2af7-4379-8c67-03e125750bb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.538585] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "bdec185e-2af7-4379-8c67-03e125750bb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.538827] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "bdec185e-2af7-4379-8c67-03e125750bb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.539046] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "bdec185e-2af7-4379-8c67-03e125750bb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.542247] env[62914]: INFO nova.compute.manager [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Terminating instance [ 972.547400] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "refresh_cache-bdec185e-2af7-4379-8c67-03e125750bb4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.547642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquired lock "refresh_cache-bdec185e-2af7-4379-8c67-03e125750bb4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.549253] env[62914]: DEBUG nova.network.neutron [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 
tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 972.710904] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525bba34-2f4f-a676-3a1f-a64655088b85, 'name': SearchDatastore_Task, 'duration_secs': 0.050357} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.714641] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.714869] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.715287] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.715477] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.715704] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.716424] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afaf5afb-7be3-41a9-bc11-81ef0543ae63 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.732998] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.733173] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 972.734084] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8960d728-9a7d-40a2-be46-fe25ca1589e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.744120] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 972.744120] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52867cbc-691b-533b-d8a9-50ca2164bf7d" [ 972.744120] env[62914]: _type = "Task" [ 972.744120] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.757133] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52867cbc-691b-533b-d8a9-50ca2164bf7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.861270] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d986d810-dee5-4fb2-babc-4355ff63faea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.873691] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bb1f38-06f5-4a15-9c87-6a29dcb2f56c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.909996] env[62914]: DEBUG oslo_concurrency.lockutils [req-dc6fbb87-7f0a-4648-ab75-76fe64de0ba8 req-d46895e1-d3d5-4347-87f9-b1894e3c0a1a service nova] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.917297] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3075f3c4-ea5a-48e4-86c7-3d9f9783afb9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.932567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef6f1ea-8774-4c72-9025-cd47cfaacd49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.937277] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832262, 'name': ReconfigVM_Task, 'duration_secs': 0.427835} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.937550] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832261, 'name': Rename_Task, 'duration_secs': 0.201774} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.937904] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Reconfigured VM instance instance-00000056 to attach disk [datastore1] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.938601] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 972.942712] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-077030fd-bd6e-4bcc-bfae-3f0444740aa6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.944439] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b84c60da-7d73-4c10-82de-9512f78df74f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.955347] env[62914]: DEBUG nova.compute.provider_tree [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.962667] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 972.962667] env[62914]: value = "task-4832264" [ 972.962667] env[62914]: _type = "Task" [ 972.962667] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.962667] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.963082] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 972.963082] env[62914]: value = "task-4832263" [ 972.963082] env[62914]: _type = "Task" [ 972.963082] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.980660] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832264, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.987234] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832263, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.995376] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52756db1-255c-13d4-c276-93f9224bc052, 'name': SearchDatastore_Task, 'duration_secs': 0.084143} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.996322] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38534caf-9c84-4865-a5bb-7d386990eda8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.005755] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 973.005755] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52caeaa0-211f-0b3d-2730-7efc630370bb" [ 973.005755] env[62914]: _type = "Task" [ 973.005755] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.016281] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52caeaa0-211f-0b3d-2730-7efc630370bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.072041] env[62914]: DEBUG nova.network.neutron [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 973.147846] env[62914]: DEBUG nova.network.neutron [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.258684] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52867cbc-691b-533b-d8a9-50ca2164bf7d, 'name': SearchDatastore_Task, 'duration_secs': 0.055453} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.260688] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd5a9e46-b303-4c82-87cd-84eb55d6a844 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.271211] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 973.271211] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52af8198-80eb-29de-c1c0-29b03457cc1b" [ 973.271211] env[62914]: _type = "Task" [ 973.271211] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.281347] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52af8198-80eb-29de-c1c0-29b03457cc1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.451324] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.465962] env[62914]: DEBUG nova.scheduler.client.report [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 973.488833] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832263, 'name': Rename_Task, 'duration_secs': 0.205502} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.492937] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 973.494220] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832264, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.494953] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c89d56d4-c4d2-483a-b9f1-3baf3829d1d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.506641] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 973.506641] env[62914]: value = "task-4832265" [ 973.506641] env[62914]: _type = "Task" [ 973.506641] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.524036] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52caeaa0-211f-0b3d-2730-7efc630370bb, 'name': SearchDatastore_Task, 'duration_secs': 0.089084} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.528319] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.529032] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] fa33e1a5-677a-489c-8c89-a33066b18103/fa33e1a5-677a-489c-8c89-a33066b18103.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 973.530160] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832265, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.530580] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9989cc67-6c43-47a5-9843-b0e162d2a9d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.541676] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 973.541676] env[62914]: value = "task-4832266" [ 973.541676] env[62914]: _type = "Task" [ 973.541676] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.555669] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.650599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Releasing lock "refresh_cache-bdec185e-2af7-4379-8c67-03e125750bb4" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.651150] env[62914]: DEBUG nova.compute.manager [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 973.651389] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 973.652412] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddce911-f498-4687-ad0b-040b9e19e451 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.663255] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 973.663613] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f8da816-a72c-464a-8991-958fc4cd9575 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.678883] env[62914]: DEBUG oslo_vmware.api [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 973.678883] env[62914]: value = "task-4832267" [ 973.678883] env[62914]: _type = "Task" [ 973.678883] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.691534] env[62914]: DEBUG oslo_vmware.api [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832267, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.786646] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52af8198-80eb-29de-c1c0-29b03457cc1b, 'name': SearchDatastore_Task, 'duration_secs': 0.048158} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.786919] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.787289] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 973.787582] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d7df046-3214-449c-8726-b81a23e0bfa3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.799894] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 973.799894] env[62914]: value = "task-4832268" [ 973.799894] env[62914]: _type = "Task" [ 973.799894] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.814209] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.952250] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.981039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.754s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.981807] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 973.990739] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.860s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.991012] env[62914]: INFO nova.compute.claims [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.994559] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.026976] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832265, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.052648] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.190954] env[62914]: DEBUG oslo_vmware.api [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832267, 'name': PowerOffVM_Task, 'duration_secs': 0.131295} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.190954] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 974.190954] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 974.190954] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ba89273-12e7-416e-b81b-08ffbabf994b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.216540] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 974.216865] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 974.217212] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleting the datastore file [datastore2] bdec185e-2af7-4379-8c67-03e125750bb4 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.217601] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-daf26957-b471-446f-9bbf-069aa9149909 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.225639] env[62914]: DEBUG oslo_vmware.api [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for the task: (returnval){ [ 974.225639] env[62914]: value = "task-4832270" [ 974.225639] env[62914]: _type = "Task" [ 974.225639] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.235428] env[62914]: DEBUG oslo_vmware.api [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.309846] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832268, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.450237] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832255, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.672404} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.450592] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ebc6dd3-004e-4687-828d-5c558c734f0f/2ebc6dd3-004e-4687-828d-5c558c734f0f.vmdk to [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 974.451494] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fab966-99d8-4533-9fbc-7d14a0a03810 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.474119] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.474508] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-440f0f74-f968-4067-a6c7-4ad6e3283e0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.498726] env[62914]: DEBUG nova.compute.utils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 974.502499] env[62914]: DEBUG oslo_vmware.api [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832264, 'name': PowerOnVM_Task, 'duration_secs': 1.311579} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.504964] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 974.504964] env[62914]: DEBUG nova.network.neutron [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 974.506889] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 974.507989] env[62914]: INFO nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Took 9.12 seconds to spawn the instance on the hypervisor. [ 974.507989] env[62914]: DEBUG nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 974.507989] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 974.507989] env[62914]: value = "task-4832271" [ 974.507989] env[62914]: _type = "Task" [ 974.507989] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.508702] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7e8d60-0cd0-47b4-8773-c01a781d1fcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.530585] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832271, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.535175] env[62914]: DEBUG oslo_vmware.api [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832265, 'name': PowerOnVM_Task, 'duration_secs': 0.658505} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.535486] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 974.535694] env[62914]: INFO nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Took 11.77 seconds to spawn the instance on the hypervisor. [ 974.535880] env[62914]: DEBUG nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 974.536700] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e8e56c-dea3-4932-b27c-d024f6c36650 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.556617] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.558585] env[62914]: DEBUG nova.policy [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493014f3d66341759a8e03a7878d0af8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78ce97bf0a6a4b65b3cd1e316989a1ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 974.736826] env[62914]: DEBUG oslo_vmware.api [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Task: {'id': task-4832270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167248} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.737088] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.737299] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 974.737488] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 974.737674] env[62914]: INFO nova.compute.manager [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Took 1.09 seconds to destroy the instance on the hypervisor. [ 974.737921] env[62914]: DEBUG oslo.service.loopingcall [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.738148] env[62914]: DEBUG nova.compute.manager [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 974.738230] env[62914]: DEBUG nova.network.neutron [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 974.764371] env[62914]: DEBUG nova.network.neutron [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 974.809964] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832268, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.856658] env[62914]: DEBUG nova.network.neutron [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Successfully created port: 771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 974.887680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.887936] env[62914]: DEBUG oslo_concurrency.lockutils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.005985] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 975.024589] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832271, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.041491] env[62914]: INFO nova.compute.manager [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Took 41.02 seconds to build instance. [ 975.058574] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.061461] env[62914]: INFO nova.compute.manager [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Took 44.98 seconds to build instance. 
[ 975.270351] env[62914]: DEBUG nova.network.neutron [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.311442] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.394710] env[62914]: DEBUG nova.compute.utils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 975.454089] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01518768-134f-4845-990b-d519bc7ae7ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.464559] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a40df6-0bd2-4077-8cb3-b57421811a75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.499569] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed77391b-550d-4312-a4a9-4d8d00439b73 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.509207] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f68862f-7164-4e51-871e-b8b47e9a0cf1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.531596] env[62914]: DEBUG nova.compute.provider_tree [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.536647] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832271, 'name': ReconfigVM_Task, 'duration_secs': 0.99425} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.537124] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfigured VM instance instance-00000038 to attach disk [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04/dc99b470-4334-408d-8853-d2e9b9204d04.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.538522] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'device_type': 'disk', 'encrypted': False, 'encryption_format': None, 'boot_index': 0, 'guest_format': None, 'encryption_secret_uuid': None, 'disk_bus': None, 'encryption_options': None, 'size': 0, 'image_id': '75c43660-b52b-450e-ba36-0f721e14bc6c'}], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'mount_device': '/dev/sdb', 'attachment_id': 'dd89dc52-363b-48f4-a04a-7905a252c8ed', 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941994', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'name': 'volume-37c44301-e13e-475c-b93b-3d45c6886107', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'dc99b470-4334-408d-8853-d2e9b9204d04', 'attached_at': '', 'detached_at': '', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'serial': '37c44301-e13e-475c-b93b-3d45c6886107'}, 'delete_on_termination': False, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62914) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 975.539665] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 975.539665] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941994', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'name': 'volume-37c44301-e13e-475c-b93b-3d45c6886107', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'dc99b470-4334-408d-8853-d2e9b9204d04', 'attached_at': '', 'detached_at': '', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'serial': '37c44301-e13e-475c-b93b-3d45c6886107'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 975.539813] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cee952-ade5-4a12-af2c-4b6f0e7f1d75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.545141] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7a9b7e26-bc2f-4ad3-a054-2a1ca3375813 tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "b443050b-78ae-4f9d-81d4-508f5cf4a322" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.546s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.561821] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff60fd45-adc3-4236-b06c-10b281fbdf57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.568379] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bb58eb02-64e4-4023-b0b1-1af24f427632 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.500s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.568782] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832266, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.593918] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] volume-37c44301-e13e-475c-b93b-3d45c6886107/volume-37c44301-e13e-475c-b93b-3d45c6886107.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.593918] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aadb9ab0-c273-48b0-bdde-ddbe0faa4550 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.613562] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 975.613562] env[62914]: value = "task-4832272" [ 975.613562] env[62914]: _type = "Task" [ 975.613562] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.622645] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832272, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.773273] env[62914]: INFO nova.compute.manager [-] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Took 1.03 seconds to deallocate network for instance. [ 975.815162] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832268, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.898082] env[62914]: DEBUG oslo_concurrency.lockutils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.022926] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 976.037962] env[62914]: DEBUG nova.scheduler.client.report [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.051618] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 976.051874] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 976.052050] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.052302] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 976.052473] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.052631] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 976.052845] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 976.053015] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 976.053204] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 976.053378] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 976.053553] env[62914]: DEBUG nova.virt.hardware [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 976.054392] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ef1a3d-8b9b-4799-a601-a4830913601d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.066954] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832266, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.031852} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.067335] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] fa33e1a5-677a-489c-8c89-a33066b18103/fa33e1a5-677a-489c-8c89-a33066b18103.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 976.067557] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.068776] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d797802a-a643-4023-b9ee-37afd981a430 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.072704] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c5de43f-b79c-4b7e-b3de-504442f870c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.087332] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 976.087332] env[62914]: value = "task-4832273" [ 976.087332] env[62914]: _type = "Task" [ 976.087332] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.097240] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832273, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.125290] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832272, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.281342] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.315582] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832268, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.235357} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.315582] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 976.315582] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.315582] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd60757e-1d45-480c-955a-a5fe5670d767 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.323563] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 976.323563] env[62914]: value = "task-4832274" [ 976.323563] env[62914]: _type = "Task" [ 976.323563] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.333361] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832274, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.395711] env[62914]: DEBUG nova.compute.manager [None req-d4c16b05-bafa-4d2b-9d79-4a5343bd2831 tempest-ServerDiagnosticsV248Test-1136005245 tempest-ServerDiagnosticsV248Test-1136005245-project-admin] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 976.397417] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df2df48-a242-423a-bf4b-a226a43bc847 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.408021] env[62914]: INFO nova.compute.manager [None req-d4c16b05-bafa-4d2b-9d79-4a5343bd2831 tempest-ServerDiagnosticsV248Test-1136005245 tempest-ServerDiagnosticsV248Test-1136005245-project-admin] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Retrieving diagnostics [ 976.408622] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b090b74-4a9a-401f-9229-68a8a92940cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.452582] env[62914]: DEBUG nova.network.neutron [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Successfully updated port: 771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 976.543287] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.544302] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 976.547796] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.573s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.548045] env[62914]: DEBUG nova.objects.instance [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'resources' on Instance uuid 47aa2783-367e-4445-8261-7c75eb7561ab {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.600236] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085013} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.600581] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.601449] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbdd0e2-a905-489d-8e81-e3424c214a9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.625392] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] fa33e1a5-677a-489c-8c89-a33066b18103/fa33e1a5-677a-489c-8c89-a33066b18103.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.629180] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52ae2502-8b81-4cae-b846-0531d4311355 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.650257] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832272, 'name': ReconfigVM_Task, 'duration_secs': 0.586361} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.651894] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfigured VM instance instance-00000038 to attach disk [datastore1] volume-37c44301-e13e-475c-b93b-3d45c6886107/volume-37c44301-e13e-475c-b93b-3d45c6886107.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.656888] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 976.656888] env[62914]: value = "task-4832275" [ 976.656888] env[62914]: _type = "Task" [ 976.656888] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.657167] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea37a6f3-ea15-4620-8a36-efc51f52b361 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.678699] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.680827] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 976.680827] env[62914]: value = "task-4832276" [ 976.680827] env[62914]: _type = "Task" [ 976.680827] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.692727] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832276, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.833734] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081389} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.833926] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.834969] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de2f110-d6ec-44e5-b916-c334d91ce9a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.864369] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.865046] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67a881cf-4a70-42b8-bffd-5bdb0fb2e3a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.887684] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 976.887684] env[62914]: value = "task-4832277" [ 976.887684] env[62914]: _type = "Task" [ 976.887684] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.890378] env[62914]: DEBUG nova.compute.manager [req-07876dca-d3a0-4ce0-a1df-684d70851c65 req-a523e0f4-3285-40a4-b120-e9ae44eac947 service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Received event network-vif-plugged-771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 976.890611] env[62914]: DEBUG oslo_concurrency.lockutils [req-07876dca-d3a0-4ce0-a1df-684d70851c65 req-a523e0f4-3285-40a4-b120-e9ae44eac947 service nova] Acquiring lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.890861] env[62914]: DEBUG oslo_concurrency.lockutils [req-07876dca-d3a0-4ce0-a1df-684d70851c65 req-a523e0f4-3285-40a4-b120-e9ae44eac947 service nova] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.891080] env[62914]: DEBUG oslo_concurrency.lockutils [req-07876dca-d3a0-4ce0-a1df-684d70851c65 req-a523e0f4-3285-40a4-b120-e9ae44eac947 service nova] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.891399] env[62914]: DEBUG nova.compute.manager [req-07876dca-d3a0-4ce0-a1df-684d70851c65 req-a523e0f4-3285-40a4-b120-e9ae44eac947 service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] No waiting events found dispatching network-vif-plugged-771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 976.891610] env[62914]: WARNING nova.compute.manager [req-07876dca-d3a0-4ce0-a1df-684d70851c65 req-a523e0f4-3285-40a4-b120-e9ae44eac947 service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Received unexpected event network-vif-plugged-771b1fec-a03e-465e-93bb-e565d996e361 for instance with vm_state building and task_state spawning. [ 976.903605] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832277, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.955393] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "refresh_cache-b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.955631] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "refresh_cache-b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.955844] env[62914]: DEBUG nova.network.neutron [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 976.978680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.978997] env[62914]: DEBUG oslo_concurrency.lockutils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.979408] env[62914]: INFO nova.compute.manager [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Attaching volume 864a42ed-47df-4ae7-ace0-224fba823a1f to /dev/sdb [ 977.020683] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02bbc2b-f2c7-4439-bd37-e9a7a67c4cc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.029944] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c58369-66c3-42dd-be2e-56b680325003 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.047697] env[62914]: DEBUG nova.virt.block_device [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating existing volume attachment record: cd96b999-859f-4c9d-9d13-fc20bb15c668 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 977.054188] env[62914]: DEBUG nova.compute.utils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 
tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 977.056805] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 977.056805] env[62914]: DEBUG nova.network.neutron [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 977.121602] env[62914]: DEBUG nova.policy [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e291489da35649d0a2c69f98714d89ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14ea39ac6e2d400ca89bbffc20d764ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 977.181975] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832275, 'name': ReconfigVM_Task, 'duration_secs': 0.427192} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.188767] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Reconfigured VM instance instance-00000058 to attach disk [datastore1] fa33e1a5-677a-489c-8c89-a33066b18103/fa33e1a5-677a-489c-8c89-a33066b18103.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.189371] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae60a03b-dccd-4b08-a011-a8effbca6e52 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.196691] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832276, 'name': ReconfigVM_Task, 'duration_secs': 0.220643} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.198264] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941994', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'name': 'volume-37c44301-e13e-475c-b93b-3d45c6886107', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'dc99b470-4334-408d-8853-d2e9b9204d04', 'attached_at': '', 'detached_at': '', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'serial': '37c44301-e13e-475c-b93b-3d45c6886107'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 977.198953] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 977.198953] env[62914]: value = "task-4832278" [ 977.198953] env[62914]: _type = "Task" [ 977.198953] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.199358] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f7d828b-c494-401d-99d9-c2b05885a17c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.215378] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832278, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.218851] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 977.218851] env[62914]: value = "task-4832280" [ 977.218851] env[62914]: _type = "Task" [ 977.218851] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.231131] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832280, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.406252] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832277, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.456420] env[62914]: DEBUG nova.network.neutron [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Successfully created port: 2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.498690] env[62914]: DEBUG nova.network.neutron [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 977.553555] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c569e64-7725-4dbd-a923-50aebe808c24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.561759] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 977.565493] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70ae480-8688-4925-885b-e2c269e7dbff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.610247] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15857ce2-600b-436f-b2e4-b1d2952c9bf7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.613512] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.613914] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.614222] env[62914]: DEBUG nova.compute.manager [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 977.615947] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be397ea-1d14-4fd0-9e32-acab7bb6b75e {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.627377] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb0f306-dd52-4edf-870b-1b4619189436 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.632496] env[62914]: DEBUG nova.compute.manager [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 977.633291] env[62914]: DEBUG nova.objects.instance [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'flavor' on Instance uuid dac99ed2-aed9-4c3e-bcab-a8de9967990c {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.648724] env[62914]: DEBUG nova.compute.provider_tree [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.712544] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832278, 'name': Rename_Task, 'duration_secs': 0.377712} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.712729] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 977.713261] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16fa9890-e0d0-45cc-8a8a-f5bb383d7f02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.720921] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 977.720921] env[62914]: value = "task-4832283" [ 977.720921] env[62914]: _type = "Task" [ 977.720921] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.735170] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832280, 'name': Rename_Task, 'duration_secs': 0.328352} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.739156] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 977.739308] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.739654] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02018f45-fb46-41db-bbb8-b3961cd91896 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.748542] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 977.748542] env[62914]: value = "task-4832284" [ 977.748542] env[62914]: _type = "Task" [ 977.748542] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.758408] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832284, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.767726] env[62914]: DEBUG nova.network.neutron [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Updating instance_info_cache with network_info: [{"id": "771b1fec-a03e-465e-93bb-e565d996e361", "address": "fa:16:3e:61:f0:19", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap771b1fec-a0", "ovs_interfaceid": "771b1fec-a03e-465e-93bb-e565d996e361", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.904340] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832277, 'name': ReconfigVM_Task, 'duration_secs': 0.586752} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.904683] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.905365] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dada77c2-b407-4e7d-9001-4f3150135101 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.913562] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 977.913562] env[62914]: value = "task-4832285" [ 977.913562] env[62914]: _type = "Task" [ 977.913562] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.924681] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832285, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.140604] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 978.140946] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14ab38f7-566f-4023-b24a-9857230a37d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.150115] env[62914]: DEBUG oslo_vmware.api [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 978.150115] env[62914]: value = "task-4832286" [ 978.150115] env[62914]: _type = "Task" [ 978.150115] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.155730] env[62914]: DEBUG nova.scheduler.client.report [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.166134] env[62914]: DEBUG oslo_vmware.api [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.235601] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832283, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.260189] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832284, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.271482] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "refresh_cache-b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.272075] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Instance network_info: |[{"id": "771b1fec-a03e-465e-93bb-e565d996e361", "address": "fa:16:3e:61:f0:19", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap771b1fec-a0", "ovs_interfaceid": "771b1fec-a03e-465e-93bb-e565d996e361", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 978.272762] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:f0:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78b49840-c3fc-455c-8491-a253ccd92bb5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '771b1fec-a03e-465e-93bb-e565d996e361', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.281542] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Creating folder: Project (78ce97bf0a6a4b65b3cd1e316989a1ed). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 978.281917] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97de61b5-f3b5-4eab-b655-9913a9dc2b10 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.299408] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Created folder: Project (78ce97bf0a6a4b65b3cd1e316989a1ed) in parent group-v941773. [ 978.299752] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Creating folder: Instances. Parent ref: group-v942014. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 978.300164] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be5ccbc9-86f1-4c71-832f-804c92dfe0a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.314889] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Created folder: Instances in parent group-v942014. [ 978.315347] env[62914]: DEBUG oslo.service.loopingcall [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.315639] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 978.315915] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3fab072-38d6-4368-9cbe-d249e8c9c9d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.341229] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.341229] env[62914]: value = "task-4832289" [ 978.341229] env[62914]: _type = "Task" [ 978.341229] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.351723] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832289, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.427783] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832285, 'name': Rename_Task, 'duration_secs': 0.189638} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.427783] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 978.428729] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6785fd48-435d-4aa4-8b5b-6a393d90e867 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.437926] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 978.437926] env[62914]: value = "task-4832290" [ 978.437926] env[62914]: _type = "Task" [ 978.437926] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.448865] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.578642] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 978.615020] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 978.615020] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 978.615020] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.615359] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 978.615663] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.615964] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 978.617823] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 978.617823] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 978.617823] env[62914]: DEBUG 
nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 978.617823] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 978.617823] env[62914]: DEBUG nova.virt.hardware [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 978.618077] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e574ea91-c4e7-46ba-8d98-ed91badbd699 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.627691] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4c1543-f459-4598-9637-3c9d114e01bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.661504] env[62914]: DEBUG oslo_vmware.api [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832286, 'name': PowerOffVM_Task, 'duration_secs': 0.220101} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.661982] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 978.662243] env[62914]: DEBUG nova.compute.manager [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 978.663214] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.667106] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376bb01c-fb47-4a87-bfce-ed6b054e9098 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.670998] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.747s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.671292] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.673601] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.204s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.675240] env[62914]: INFO nova.compute.claims [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.696042] env[62914]: INFO nova.scheduler.client.report [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted allocations for instance 47aa2783-367e-4445-8261-7c75eb7561ab [ 978.698638] env[62914]: INFO nova.scheduler.client.report [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 
tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocations for instance 557c0538-fc4a-403a-a9cb-b706e2260b1c [ 978.736875] env[62914]: DEBUG oslo_vmware.api [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832283, 'name': PowerOnVM_Task, 'duration_secs': 0.76822} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.737286] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 978.737555] env[62914]: INFO nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Took 9.68 seconds to spawn the instance on the hypervisor. [ 978.737799] env[62914]: DEBUG nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 978.738735] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0703bb-aa65-4cf8-bfaa-d426e600b189 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.763423] env[62914]: DEBUG oslo_vmware.api [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832284, 'name': PowerOnVM_Task, 'duration_secs': 0.703504} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.763774] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 978.852315] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832289, 'name': CreateVM_Task, 'duration_secs': 0.462009} completed successfully. 
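
The PowerOnVM_Task / CreateVM_Task entries above, each reported by _poll_task with a duration_secs value, follow a poll-until-terminal-state pattern around vCenter task handles. A minimal self-contained sketch of that pattern (illustrative only; the FakeTask class, state names, and poll interval are assumptions, not the oslo.vmware implementation):

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle; cycles queued -> running -> success."""
        def __init__(self, name, ticks_to_finish=3):
            self.name = name
            self._ticks = ticks_to_finish

        def info(self):
            self._ticks -= 1
            if self._ticks > 0:
                return {"state": "running", "progress": 100 - self._ticks * 30}
            return {"state": "success", "progress": 100}

    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it reaches a terminal state, mimicking the
        'progress is N%' and 'completed successfully' log lines above."""
        started = time.monotonic()
        while True:
            info = task.info()
            if info["state"] == "success":
                return {"name": task.name,
                        "duration_secs": round(time.monotonic() - started, 6)}
            if info["state"] == "error":
                raise RuntimeError(f"{task.name} failed")
            print(f"Task {task.name} progress is {info['progress']}%")
            time.sleep(poll_interval)

    print(wait_for_task(FakeTask("PowerOnVM_Task")))
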
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.852511] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 978.853227] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.853837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.853837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 978.854047] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a36dc79e-db53-4915-91f7-c59e4d007c91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.859875] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 978.859875] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a46a5-43c9-c088-3770-a31c84ee7c23" [ 978.859875] env[62914]: _type = "Task" [ 978.859875] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.871204] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a46a5-43c9-c088-3770-a31c84ee7c23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.888044] env[62914]: DEBUG nova.compute.manager [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 978.888504] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8463b186-a86a-49ba-b4b6-8bb6fcaa3be9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.917351] env[62914]: DEBUG nova.compute.manager [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Received event network-changed-771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 978.917543] env[62914]: DEBUG nova.compute.manager [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Refreshing instance network info cache due to event network-changed-771b1fec-a03e-465e-93bb-e565d996e361. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 978.917737] env[62914]: DEBUG oslo_concurrency.lockutils [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] Acquiring lock "refresh_cache-b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.917890] env[62914]: DEBUG oslo_concurrency.lockutils [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] Acquired lock "refresh_cache-b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.918070] env[62914]: DEBUG nova.network.neutron [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Refreshing network info cache for port 771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 978.948393] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832290, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.064148] env[62914]: DEBUG nova.network.neutron [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Successfully updated port: 2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.189601] env[62914]: DEBUG oslo_concurrency.lockutils [None req-61766057-9ccd-4aa2-9c06-1438fce81e9e tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.575s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.210382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-88493e8f-78c3-4341-a953-4ada19d478d2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "557c0538-fc4a-403a-a9cb-b706e2260b1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.796s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.210670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-29240860-38f4-4cd5-85ae-f31ea22eb85f tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "47aa2783-367e-4445-8261-7c75eb7561ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.411s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.260801] env[62914]: INFO nova.compute.manager [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Took 42.72 seconds to build instance. [ 979.371900] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a46a5-43c9-c088-3770-a31c84ee7c23, 'name': SearchDatastore_Task, 'duration_secs': 0.012293} completed successfully. 
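
The Lock "compute_resources" acquired :: waited N s / "released" :: held N s entries above come from instrumented named locks. A stand-alone sketch that reproduces the same waited/held bookkeeping with plain threading primitives (it mimics the log format only; it is not the oslo_concurrency.lockutils code, and the lock and owner names are taken from the log purely as examples):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                      # name -> threading.Lock
    _locks_guard = threading.Lock()  # protects the registry itself

    @contextmanager
    def timed_lock(name, owner):
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.01)  # stand-in for the resource accounting work
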
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.372341] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.372600] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.373416] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.373629] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.373863] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.374228] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e569f134-8855-491e-b822-4dddc099ce0c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.385390] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.385690] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 979.386496] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c883e99-75c1-44df-ae1f-f9689959f645 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.392561] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 979.392561] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52087980-3b73-0f96-73de-5ca867cf2263" [ 979.392561] env[62914]: _type = "Task" [ 979.392561] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.408947] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52087980-3b73-0f96-73de-5ca867cf2263, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.410123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4c6f8042-4435-454c-8b62-9a70bb3e15c5 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 57.216s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.449623] env[62914]: DEBUG oslo_vmware.api [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832290, 'name': PowerOnVM_Task, 'duration_secs': 0.665529} completed successfully. 
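
The surrounding sequence (search devstack-image-cache_base for the cached VMDK, create the folder if missing, then copy the disk into the instance directory and extend the root disk) is a fetch-if-missing image cache flow. A compressed sketch of that control flow with stubbed helpers (the helper names and the printed task names are placeholders, not the nova.virt.vmwareapi functions; the size value is echoed as logged, without asserting its unit):

    def datastore_search(path):
        """Stub for HostDatastoreBrowser.SearchDatastore_Task; pretend the image is cached."""
        return path.endswith(".vmdk")

    def make_directory(path):
        print(f"MakeDirectory {path}")

    def copy_virtual_disk(src, dst):
        print(f"CopyVirtualDisk_Task {src} -> {dst}")

    def extend_virtual_disk(path, size):
        print(f"ExtendVirtualDisk_Task {path} to {size}")

    def fetch_image_if_missing(image_id, instance_uuid,
                               datastore="datastore2", root_size=1048576):
        cache_dir = f"[{datastore}] devstack-image-cache_base"
        cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
        if not datastore_search(cached_vmdk):
            make_directory(cache_dir)
            # In the real flow the image would be downloaded into the cache here.
        instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        copy_virtual_disk(cached_vmdk, instance_vmdk)
        extend_virtual_disk(instance_vmdk, root_size)
        return instance_vmdk

    fetch_image_if_missing("75c43660-b52b-450e-ba36-0f721e14bc6c",
                           "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d")
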
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.450209] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 979.450503] env[62914]: DEBUG nova.compute.manager [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 979.451409] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b758412e-1e69-497a-8a0e-708f6c6617e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.569727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.569727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.569727] env[62914]: DEBUG nova.network.neutron [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 979.667461] env[62914]: DEBUG nova.network.neutron [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Updated VIF entry in instance network info cache for port 771b1fec-a03e-465e-93bb-e565d996e361. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 979.668109] env[62914]: DEBUG nova.network.neutron [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Updating instance_info_cache with network_info: [{"id": "771b1fec-a03e-465e-93bb-e565d996e361", "address": "fa:16:3e:61:f0:19", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap771b1fec-a0", "ovs_interfaceid": "771b1fec-a03e-465e-93bb-e565d996e361", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.762393] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1329149c-1ea6-433b-a1e9-d36681f23451 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "fa33e1a5-677a-489c-8c89-a33066b18103" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.236s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.904467] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52087980-3b73-0f96-73de-5ca867cf2263, 'name': SearchDatastore_Task, 'duration_secs': 0.009655} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.909077] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbf22f87-3371-48f5-8451-8d354bf1e479 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.918615] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 979.918615] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5217abae-19f4-b8ab-65c2-25fb77d8240a" [ 979.918615] env[62914]: _type = "Task" [ 979.918615] env[62914]: } to complete. 
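
The instance_info_cache entry above carries the full VIF model for port 771b1fec (port id, MAC, fixed IP, device name, MTU, OVS/NSX binding details). A short helper showing how such a record can be summarised for display; the record below is abridged from the log entry, and the summarise function is illustrative only:

    network_info = [{
        "id": "771b1fec-a03e-465e-93bb-e565d996e361",
        "address": "fa:16:3e:61:f0:19",
        "devname": "tap771b1fec-a0",
        "type": "ovs",
        "active": True,
        "network": {
            "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.5", "type": "fixed"}],
            }],
            "meta": {"mtu": 8950},
        },
    }]

    def summarise_vifs(nw_info):
        # Yield (port id, MAC, devname, fixed IPs, MTU) per VIF entry.
        for vif in nw_info:
            ips = [ip["address"]
                   for subnet in vif["network"]["subnets"]
                   for ip in subnet["ips"]]
            yield (vif["id"], vif["address"], vif["devname"],
                   ips, vif["network"]["meta"]["mtu"])

    for summary in summarise_vifs(network_info):
        print(summary)
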
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.928443] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5217abae-19f4-b8ab-65c2-25fb77d8240a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.978200] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.120699] env[62914]: DEBUG nova.network.neutron [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 980.129584] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db78914-39f5-421e-aaf1-013f4b88db58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.138944] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0347b4b-d8d6-4686-a86b-212cdb59a360 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.178116] env[62914]: DEBUG oslo_concurrency.lockutils [req-aee35463-eb79-4704-b61f-80cb3a58efc5 req-79af0c71-b7e1-45ad-9e6f-4c233cc3e88f service nova] Releasing lock "refresh_cache-b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.182392] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00061c52-ef5f-46c3-899c-88397587cf6e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.188734] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1565a87f-02af-49dd-9fef-b92e4766c006 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.206507] env[62914]: DEBUG nova.compute.provider_tree [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.401007] env[62914]: DEBUG nova.network.neutron [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [{"id": "2295762d-8e27-469d-a292-9ef453b210d6", "address": "fa:16:3e:65:01:15", "network": {"id": 
"42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295762d-8e", "ovs_interfaceid": "2295762d-8e27-469d-a292-9ef453b210d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.430475] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5217abae-19f4-b8ab-65c2-25fb77d8240a, 'name': SearchDatastore_Task, 'duration_secs': 0.012043} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.430475] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.430475] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d/b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 980.430690] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7dfaea8-e9a1-48d5-b1b1-acd27de5facc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.439718] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 980.439718] env[62914]: value = "task-4832292" [ 980.439718] env[62914]: _type = "Task" [ 980.439718] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.449535] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.711073] env[62914]: DEBUG nova.scheduler.client.report [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.757333] env[62914]: INFO nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Rebuilding instance [ 980.848563] env[62914]: DEBUG nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 980.849865] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302684ab-4df6-44e2-94a0-83cdb1ace05b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.905771] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.906214] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Instance network_info: |[{"id": "2295762d-8e27-469d-a292-9ef453b210d6", "address": "fa:16:3e:65:01:15", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
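
The inventory report above (VCPU total 48, allocation_ratio 4.0; MEMORY_MB total 196590, reserved 512; DISK_GB total 200) determines how much capacity Placement offers for this provider. Under the usual capacity formula, (total - reserved) * allocation_ratio, the figures work out as in this small check; treat it as a reading aid rather than a dump of the scheduler code:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable capacity = {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
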
"nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295762d-8e", "ovs_interfaceid": "2295762d-8e27-469d-a292-9ef453b210d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 980.906752] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:01:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2295762d-8e27-469d-a292-9ef453b210d6', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.920579] env[62914]: DEBUG oslo.service.loopingcall [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.922228] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 980.923416] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c0efd68-370a-4b11-87c0-225adb7849b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.958153] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832292, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.960194] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.960194] env[62914]: value = "task-4832293" [ 980.960194] env[62914]: _type = "Task" [ 980.960194] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.972898] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832293, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.043930] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Received event network-vif-plugged-2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 981.044618] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquiring lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.045172] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.046728] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.046728] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] No waiting events found dispatching network-vif-plugged-2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 981.046728] env[62914]: WARNING nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Received unexpected event network-vif-plugged-2295762d-8e27-469d-a292-9ef453b210d6 for instance with vm_state building and task_state spawning. [ 981.046728] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Received event network-changed-2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 981.046728] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Refreshing instance network info cache due to event network-changed-2295762d-8e27-469d-a292-9ef453b210d6. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 981.048183] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquiring lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.051179] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquired lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.051179] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Refreshing network info cache for port 2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 981.220784] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.221469] env[62914]: DEBUG nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Start building networks asynchronously for instance. 
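
The network-vif-plugged / network-changed events above trigger a refresh of the per-instance network info cache, guarded by a refresh_cache-<uuid> lock. A schematic handler showing that shape (the lock helper, cache dict, and refresh function are hypothetical stand-ins, not the nova.compute.manager code):

    import threading
    from contextlib import contextmanager

    _cache_locks = {}   # "refresh_cache-<uuid>" -> threading.Lock
    _nw_cache = {}      # instance uuid -> cached network info

    @contextmanager
    def instance_cache_lock(instance_uuid):
        lock = _cache_locks.setdefault(f"refresh_cache-{instance_uuid}",
                                       threading.Lock())
        with lock:
            yield

    def refresh_network_info(instance_uuid, port_id):
        """Stand-in for a Neutron lookup; returns a minimal VIF record."""
        return [{"id": port_id, "active": True}]

    def handle_external_event(event_name, instance_uuid, port_id):
        if event_name not in ("network-changed", "network-vif-plugged"):
            return
        with instance_cache_lock(instance_uuid):
            _nw_cache[instance_uuid] = refresh_network_info(instance_uuid, port_id)
            print(f"Updated VIF entry in instance network info cache "
                  f"for port {port_id}")

    handle_external_event("network-changed",
                          "120fa16e-60cd-4326-b6c4-f1df419dbcb7",
                          "2295762d-8e27-469d-a292-9ef453b210d6")
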
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 981.224669] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.436s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.225577] env[62914]: DEBUG nova.objects.instance [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lazy-loading 'resources' on Instance uuid ea06d3c3-d836-4e66-ac66-42f9886cd5de {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.228364] env[62914]: INFO nova.compute.manager [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Rebuilding instance [ 981.288405] env[62914]: DEBUG nova.compute.manager [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 981.289537] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1f5f49-d547-4920-b3a1-df6d172a7323 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.375975] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 981.376501] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de5f85f0-4e83-4b4d-b34b-a3130b434a9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.386655] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 981.386655] env[62914]: value = "task-4832294" [ 981.386655] env[62914]: _type = "Task" [ 981.386655] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.399321] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 981.399321] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 981.400279] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b8fb19-8c9f-4f54-a42c-723f4700e95f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.409244] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 981.409715] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47636ca6-16a7-4b04-acaa-60210e6db593 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.458735] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520632} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.459086] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d/b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 981.459324] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.459694] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd5b4377-23b8-4083-a94f-50a73b33e6f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.471356] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832293, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.473295] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 981.473295] env[62914]: value = "task-4832296" [ 981.473295] env[62914]: _type = "Task" [ 981.473295] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.486263] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832296, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.516717] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 981.516957] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 981.517168] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore1] dac99ed2-aed9-4c3e-bcab-a8de9967990c {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.517467] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57801196-3289-4b6a-bd07-91964ff3d271 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.527351] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 981.527351] env[62914]: value = "task-4832297" [ 981.527351] env[62914]: _type = "Task" [ 981.527351] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.537885] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832297, 'name': DeleteDatastoreFile_Task} progress is 0%. 
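
The rebuild path above powers the VM off (tolerating "VM already powered off"), unregisters it, then deletes its datastore directory before respawning. A compact sketch of that ordering with stubbed vCenter calls (the function and printed task names are placeholders, not the vmops code):

    def destroy_for_rebuild(vm, datastore, instance_uuid):
        # Power off is best-effort: a VM that is already off is not an error.
        if vm.get("power_state") == "poweredOn":
            print("PowerOffVM_Task")
            vm["power_state"] = "poweredOff"
        else:
            print("VM already powered off")
        print("UnregisterVM")
        print(f"DeleteDatastoreFile_Task [{datastore}] {instance_uuid}")

    destroy_for_rebuild({"power_state": "poweredOff"},
                        "datastore1",
                        "dac99ed2-aed9-4c3e-bcab-a8de9967990c")
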
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.730946] env[62914]: DEBUG nova.compute.utils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 981.737877] env[62914]: DEBUG nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Not allocating networking since 'none' was specified. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 981.803884] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 981.804917] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b037ac8a-d3d6-4915-a436-b9dd0230dc21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.815025] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 981.815025] env[62914]: value = "task-4832298" [ 981.815025] env[62914]: _type = "Task" [ 981.815025] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.838738] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.906701] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updated VIF entry in instance network info cache for port 2295762d-8e27-469d-a292-9ef453b210d6. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 981.907109] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [{"id": "2295762d-8e27-469d-a292-9ef453b210d6", "address": "fa:16:3e:65:01:15", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295762d-8e", "ovs_interfaceid": "2295762d-8e27-469d-a292-9ef453b210d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.973981] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832293, 'name': CreateVM_Task, 'duration_secs': 0.575302} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.980343] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 981.981743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.981743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.981968] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.982617] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bda024d6-ce45-48e5-9389-694cd19e6589 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.988455] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12984} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.990044] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 981.990498] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 981.990498] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52be0298-a67d-67e8-f17d-0955180388a2" [ 981.990498] env[62914]: _type = "Task" [ 981.990498] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.991437] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29880f15-0471-4b32-9be3-0b33db65d759 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.019225] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52be0298-a67d-67e8-f17d-0955180388a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.029240] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d/b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.032811] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95c74530-8c60-492a-9442-833604c8080a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.059656] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275103} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.063953] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.064107] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 982.064314] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 982.066996] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 982.066996] env[62914]: value = "task-4832299" [ 982.066996] env[62914]: _type = "Task" [ 982.066996] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.080615] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832299, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.121833] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 982.123044] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942013', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'name': 'volume-864a42ed-47df-4ae7-ace0-224fba823a1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ec73b924-e132-44b6-bc67-2b3c08592f03', 'attached_at': '', 'detached_at': '', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'serial': '864a42ed-47df-4ae7-ace0-224fba823a1f'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 982.123443] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52202c6e-7448-489d-aae2-1ab236119a5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.153168] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c817b322-1473-4233-bcb5-e3a064c41699 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.181028] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-864a42ed-47df-4ae7-ace0-224fba823a1f/volume-864a42ed-47df-4ae7-ace0-224fba823a1f.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.184759] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-334264d2-82cb-4f74-a346-0b2b56ffb788 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.206634] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 982.206634] env[62914]: value = "task-4832300" [ 982.206634] env[62914]: _type = "Task" [ 982.206634] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.217193] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.238342] env[62914]: DEBUG nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 982.326471] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832298, 'name': PowerOffVM_Task, 'duration_secs': 0.450076} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.328070] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 982.328410] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 982.329386] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73eff38f-980c-4d6e-ae6d-5b98da7ed0ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.333361] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46c1e05-80bd-401c-9117-c855ba301803 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.342520] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 982.344336] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-924967a6-d766-47db-9d3b-0eca54abd3fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.346910] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a6458c-0b15-43ff-b5fa-8b83d6d52137 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.383079] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccab6e5-9b0c-41a3-8d80-3ec864f9c219 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.391575] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba08f2fc-2f68-432f-9055-a35b9e41b482 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.407738] env[62914]: DEBUG nova.compute.provider_tree [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.412313] 
env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Releasing lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.412665] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 982.412847] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing instance network info cache due to event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 982.413107] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.413276] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.413521] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.424656] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 982.425099] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 982.425462] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleting the datastore file [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.426260] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec689c97-7183-4cdc-bbe5-a8094b866e7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.434201] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 
tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 982.434201] env[62914]: value = "task-4832302" [ 982.434201] env[62914]: _type = "Task" [ 982.434201] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.444895] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832302, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.505521] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52be0298-a67d-67e8-f17d-0955180388a2, 'name': SearchDatastore_Task, 'duration_secs': 0.015444} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.505864] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.506166] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.506446] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.506619] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.506818] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.507147] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9dda5e4-55de-46cd-af6d-4eaafe9e5501 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.518244] env[62914]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.518477] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 982.519447] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-079c6011-c7ba-4c18-ba6c-5af1ba0325ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.526406] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 982.526406] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524ef571-5397-b9d7-1ecb-e7e7c52c36ae" [ 982.526406] env[62914]: _type = "Task" [ 982.526406] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.536566] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524ef571-5397-b9d7-1ecb-e7e7c52c36ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.581859] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832299, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.721437] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832300, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.800129] env[62914]: DEBUG nova.compute.manager [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 982.800498] env[62914]: DEBUG nova.compute.manager [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing instance network info cache due to event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 982.800828] env[62914]: DEBUG oslo_concurrency.lockutils [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.801104] env[62914]: DEBUG oslo_concurrency.lockutils [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.801398] env[62914]: DEBUG nova.network.neutron [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.910038] env[62914]: DEBUG nova.scheduler.client.report [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.949821] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156561} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.953303] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.953581] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 982.953836] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 983.040065] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524ef571-5397-b9d7-1ecb-e7e7c52c36ae, 'name': SearchDatastore_Task, 'duration_secs': 0.011291} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.041165] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74ca828e-8a50-477e-a598-743e54f1ed5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.047908] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 983.047908] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205edf6-f5d8-701f-1b8b-329b5937023e" [ 983.047908] env[62914]: _type = "Task" [ 983.047908] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.057391] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205edf6-f5d8-701f-1b8b-329b5937023e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.086804] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832299, 'name': ReconfigVM_Task, 'duration_secs': 0.549344} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.086804] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Reconfigured VM instance instance-00000059 to attach disk [datastore2] b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d/b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.087709] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba7044ab-fad7-4148-9236-ad01b3802658 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.094584] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 983.094584] env[62914]: value = "task-4832303" [ 983.094584] env[62914]: _type = "Task" [ 983.094584] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.109160] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832303, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.111573] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.111808] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.111972] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.112187] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 
tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.112368] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.112528] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.112761] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.112908] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.113092] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.113264] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.113467] env[62914]: DEBUG nova.virt.hardware [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.114317] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b475a477-170c-4cd1-85bd-217782f69295 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.122755] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673ae94f-4641-4534-ba83-d64885741e58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.142659] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:c1:3f', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2fe080d-6273-4a2c-b4dc-2d9ec37d4161', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.149634] env[62914]: DEBUG oslo.service.loopingcall [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.155170] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 983.155170] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ad198a9-adbd-406b-87f5-37d38a501748 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.174600] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.174600] env[62914]: value = "task-4832304" [ 983.174600] env[62914]: _type = "Task" [ 983.174600] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.190222] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832304, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.213311] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updated VIF entry in instance network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 983.213727] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.222262] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832300, 'name': ReconfigVM_Task, 'duration_secs': 0.904914} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.222262] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-864a42ed-47df-4ae7-ace0-224fba823a1f/volume-864a42ed-47df-4ae7-ace0-224fba823a1f.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.228956] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d186dfe-c4ee-4f77-9c5d-06328cf47361 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.246709] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 983.246709] env[62914]: value = "task-4832305" [ 983.246709] env[62914]: _type = "Task" [ 983.246709] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.247954] env[62914]: DEBUG nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 983.260085] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832305, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.278436] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.278814] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.279039] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.279279] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.279782] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.279782] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.279989] env[62914]: 
DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.280220] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.280455] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.280663] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.280888] env[62914]: DEBUG nova.virt.hardware [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.282397] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a829e7f5-e270-4c4a-a7f8-4c02afc1cae1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.292606] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fa6097-3135-433e-aa92-47d9b09a2459 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.313314] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.318962] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Creating folder: Project (450b199fddd049348541e7a6d92f6a67). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 983.320012] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20a460ea-8bb2-4015-b15c-6f0c62c7d5a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.333192] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Created folder: Project (450b199fddd049348541e7a6d92f6a67) in parent group-v941773. [ 983.333422] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Creating folder: Instances. Parent ref: group-v942019. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 983.334738] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6a73801-06cd-4768-af29-3fd1e73f4d80 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.344701] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Created folder: Instances in parent group-v942019. [ 983.344963] env[62914]: DEBUG oslo.service.loopingcall [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.345307] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 983.345451] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bee9dda9-67f1-4363-981a-95a888d2ff09 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.366779] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.366779] env[62914]: value = "task-4832308" [ 983.366779] env[62914]: _type = "Task" [ 983.366779] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.375831] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832308, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.416197] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.191s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.419835] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.453s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.420189] env[62914]: DEBUG nova.objects.instance [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lazy-loading 'resources' on Instance uuid 3e6a3787-3e9c-411c-9c3c-305a62061b47 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.456643] env[62914]: INFO nova.scheduler.client.report [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Deleted allocations for instance ea06d3c3-d836-4e66-ac66-42f9886cd5de [ 983.562478] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5205edf6-f5d8-701f-1b8b-329b5937023e, 'name': SearchDatastore_Task, 'duration_secs': 0.011224} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.562933] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.563314] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7/120fa16e-60cd-4326-b6c4-f1df419dbcb7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 983.563479] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a64b3dfa-d461-4375-baf7-e08b59b58271 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.571599] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 983.571599] env[62914]: value = "task-4832309" [ 983.571599] env[62914]: _type = "Task" [ 983.571599] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.581750] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.594663] env[62914]: DEBUG nova.network.neutron [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updated VIF entry in instance network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 983.595061] env[62914]: DEBUG nova.network.neutron [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.608684] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832303, 'name': Rename_Task, 'duration_secs': 0.320153} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.609904] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 983.610232] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f7b5bfa-215d-46c7-b9dd-a2790952b0ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.618207] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 983.618207] env[62914]: value = "task-4832310" [ 983.618207] env[62914]: _type = "Task" [ 983.618207] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.627463] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832310, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.686048] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832304, 'name': CreateVM_Task, 'duration_secs': 0.450823} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.686300] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 983.687102] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.687306] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.687641] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.687936] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31afc689-7d0b-46af-bb24-05104cbde6ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.693607] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 983.693607] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52aa3038-2c3a-2821-f385-97a7fdb40ff7" [ 983.693607] env[62914]: _type = "Task" [ 983.693607] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.704026] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52aa3038-2c3a-2821-f385-97a7fdb40ff7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.716975] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.717305] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 983.717513] env[62914]: DEBUG nova.compute.manager [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing instance network info cache due to event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 983.717787] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.757933] env[62914]: DEBUG oslo_vmware.api [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832305, 'name': ReconfigVM_Task, 'duration_secs': 0.185087} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.758284] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942013', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'name': 'volume-864a42ed-47df-4ae7-ace0-224fba823a1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ec73b924-e132-44b6-bc67-2b3c08592f03', 'attached_at': '', 'detached_at': '', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'serial': '864a42ed-47df-4ae7-ace0-224fba823a1f'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 983.878840] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832308, 'name': CreateVM_Task, 'duration_secs': 0.362309} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.879121] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 983.879642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.969947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7fd34a5-ae73-4f2a-9111-c97db9f9b2a8 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "ea06d3c3-d836-4e66-ac66-42f9886cd5de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.271s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.014599] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 984.015328] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 984.016690] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.017502] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None 
req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 984.020032] env[62914]: DEBUG nova.virt.hardware [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 984.024031] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9063dbb1-05c3-4254-b340-eaeecb644080 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.038887] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d520aa-ae97-4146-9ab2-19a1291efa15 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.064016] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:a1:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.077440] env[62914]: DEBUG oslo.service.loopingcall [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.080746] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 984.085629] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebba9609-eb14-4548-8338-ff425b7a2c0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.103298] env[62914]: DEBUG oslo_concurrency.lockutils [req-e38e88bc-667a-4254-a850-6b23ddf89d7a req-5299adbd-2e34-40ce-95c3-e0be262427d5 service nova] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.104432] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.104713] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 984.110026] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518043} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.111349] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7/120fa16e-60cd-4326-b6c4-f1df419dbcb7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 984.111712] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.113930] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.113930] env[62914]: value = "task-4832311" [ 984.113930] env[62914]: _type = "Task" [ 984.113930] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.113930] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90e11343-130a-4df7-b02b-2178eac311e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.128535] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832311, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.130896] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 984.130896] env[62914]: value = "task-4832312" [ 984.130896] env[62914]: _type = "Task" [ 984.130896] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.142627] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832310, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.154402] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.211196] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52aa3038-2c3a-2821-f385-97a7fdb40ff7, 'name': SearchDatastore_Task, 'duration_secs': 0.012254} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.211642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.211993] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.212361] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.212582] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.212853] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 984.213303] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.213746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.214086] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad16e5f2-6570-46fd-8815-19bc0811ddb1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.220255] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ac82df-d1f2-48d7-a79e-c68944e687db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.228415] env[62914]: DEBUG oslo_vmware.api [None 
req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 984.228415] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52525765-af78-de42-b9d7-3bcc24a4bfb0" [ 984.228415] env[62914]: _type = "Task" [ 984.228415] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.235055] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 984.235370] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 984.236961] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39c57fba-cfb9-49d4-a235-6e774256825a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.246155] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52525765-af78-de42-b9d7-3bcc24a4bfb0, 'name': SearchDatastore_Task, 'duration_secs': 0.01176} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.250346] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.250685] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.250977] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.254032] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 984.254032] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e1aa92-5c94-965d-b83c-84d43465b2d3" [ 984.254032] env[62914]: _type = "Task" [ 984.254032] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.267320] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e1aa92-5c94-965d-b83c-84d43465b2d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.432511] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c56e5d-3444-4bc4-9793-2ec9d40e8914 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.443316] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df9e6e4-c1e2-4561-be40-940d9f6dd03c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.475285] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c607d0-1059-4e0b-b17c-0954627b4552 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.484841] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86c9867-8a19-4143-a0ea-4e464a81481c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.499854] env[62914]: DEBUG nova.compute.provider_tree [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.630307] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832311, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.639364] env[62914]: DEBUG oslo_vmware.api [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832310, 'name': PowerOnVM_Task, 'duration_secs': 0.627828} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.640164] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 984.640629] env[62914]: INFO nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Took 8.62 seconds to spawn the instance on the hypervisor. 
[ 984.640629] env[62914]: DEBUG nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 984.641635] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af79c13e-a1b8-44aa-8b86-65d4ccac95c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.649561] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08937} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.650252] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.651507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6c7922-c28c-468c-96ff-49ac2cc76935 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.661685] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-aedc785f-619f-4b9f-850f-790f84e57577-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.662101] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-aedc785f-619f-4b9f-850f-790f84e57577-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.662463] env[62914]: DEBUG nova.objects.instance [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'flavor' on Instance uuid aedc785f-619f-4b9f-850f-790f84e57577 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.685775] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7/120fa16e-60cd-4326-b6c4-f1df419dbcb7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.687402] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0807117a-659b-4bb2-8b82-eca31f7e15a1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.715861] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 984.715861] env[62914]: value = "task-4832313" [ 984.715861] env[62914]: _type = "Task" [ 984.715861] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.727400] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.767883] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e1aa92-5c94-965d-b83c-84d43465b2d3, 'name': SearchDatastore_Task, 'duration_secs': 0.010681} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.772718] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00cc3dfe-3b17-48df-ac52-f74d72c2669b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.781026] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 984.781026] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c91043-854d-8549-f591-d64ceafad05d" [ 984.781026] env[62914]: _type = "Task" [ 984.781026] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.794285] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c91043-854d-8549-f591-d64ceafad05d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.805860] env[62914]: DEBUG nova.objects.instance [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'flavor' on Instance uuid ec73b924-e132-44b6-bc67-2b3c08592f03 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.912045] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updated VIF entry in instance network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 984.912519] env[62914]: DEBUG nova.network.neutron [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.003812] env[62914]: DEBUG nova.scheduler.client.report [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 985.125670] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832311, 'name': CreateVM_Task, 'duration_secs': 0.782182} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.125853] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 985.126589] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.126769] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.127134] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 985.127409] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52efdd15-5b45-4924-b9c9-3f70f92488ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.133069] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 985.133069] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528ec942-846b-3fac-d1b0-647cceb55c88" [ 985.133069] env[62914]: _type = "Task" [ 985.133069] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.141830] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528ec942-846b-3fac-d1b0-647cceb55c88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.152082] env[62914]: DEBUG nova.compute.manager [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 985.152678] env[62914]: DEBUG nova.compute.manager [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing instance network info cache due to event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 985.152678] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.152678] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.152852] env[62914]: DEBUG nova.network.neutron [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 985.171237] env[62914]: INFO nova.compute.manager [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Took 42.20 seconds to build instance. [ 985.231443] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832313, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.294485] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c91043-854d-8549-f591-d64ceafad05d, 'name': SearchDatastore_Task, 'duration_secs': 0.01261} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.295742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.295742] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 985.295742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.295742] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.296018] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28ece174-9ca7-424f-a350-71b5df169498 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.298947] env[62914]: DEBUG nova.objects.instance [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'pci_requests' on Instance uuid aedc785f-619f-4b9f-850f-790f84e57577 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.300097] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-310dc8d4-b58f-4018-b372-81b6a880c050 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.311261] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 985.311261] env[62914]: value = "task-4832314" [ 985.311261] env[62914]: _type = "Task" [ 985.311261] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.312917] env[62914]: DEBUG oslo_concurrency.lockutils [None req-12ab11a4-a130-4759-b941-e252c8bf5198 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.334s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.314528] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.314528] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 985.318451] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecfb760d-ddb5-4c62-b731-2cbee3d0f8e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.327578] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.330159] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 985.330159] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521c96f7-f964-f7d4-8c61-f3ccee6f15df" [ 985.330159] env[62914]: _type = "Task" [ 985.330159] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.343236] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521c96f7-f964-f7d4-8c61-f3ccee6f15df, 'name': SearchDatastore_Task, 'duration_secs': 0.013847} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.344096] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-835671cb-63e1-47d8-bbc1-6a78bf4c8233 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.346763] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.347020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.347240] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.347448] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.347963] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.349936] env[62914]: INFO nova.compute.manager [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Terminating instance [ 985.353590] env[62914]: DEBUG nova.compute.manager [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 985.353834] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 985.354487] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 985.354487] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52faa94f-d32c-de7c-2512-d0fe74cd2eed" [ 985.354487] env[62914]: _type = "Task" [ 985.354487] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.355073] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3f7c58-6835-4932-8d77-6762811fe6f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.367221] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52faa94f-d32c-de7c-2512-d0fe74cd2eed, 'name': SearchDatastore_Task, 'duration_secs': 0.009907} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.369462] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.369731] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 985.370424] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 985.370668] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-513f0b92-bb42-4b59-b517-e800e1248391 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.372932] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54d289ed-11c3-45b9-a329-4a9c16b6f4c0 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.379823] env[62914]: DEBUG oslo_vmware.api [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 985.379823] env[62914]: value = "task-4832316" [ 985.379823] env[62914]: _type = "Task" [ 985.379823] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.381287] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 985.381287] env[62914]: value = "task-4832315" [ 985.381287] env[62914]: _type = "Task" [ 985.381287] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.395106] env[62914]: DEBUG oslo_vmware.api [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.398781] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832315, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.416112] env[62914]: DEBUG oslo_concurrency.lockutils [req-01d0e6c8-bd13-409d-92b3-bf358e23f8db req-f521bb5a-8def-462f-9953-47b36f946727 service nova] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.509902] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.512991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.389s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.513280] env[62914]: DEBUG nova.objects.instance [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lazy-loading 'resources' on Instance uuid 13f2a615-aa95-411d-92f8-9ff1b6eba420 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.534208] env[62914]: INFO nova.scheduler.client.report [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted allocations for instance 
3e6a3787-3e9c-411c-9c3c-305a62061b47 [ 985.647426] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528ec942-846b-3fac-d1b0-647cceb55c88, 'name': SearchDatastore_Task, 'duration_secs': 0.010449} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.647802] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.648070] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.648330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.648485] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.648691] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.649012] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87a62b06-cde2-4a52-ba0b-75ae90e57f0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.666849] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.666910] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 985.667736] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-639d1e78-6631-4e98-8b68-b5b36fbfa3a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.673540] env[62914]: DEBUG oslo_concurrency.lockutils [None req-693d102d-51b0-4c5b-9c00-b4b90d6a4d78 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.716s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.676435] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 985.676435] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b7db43-0a96-e807-6df6-d7d29a0b90a4" [ 985.676435] env[62914]: _type = "Task" [ 985.676435] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.687115] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b7db43-0a96-e807-6df6-d7d29a0b90a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.730354] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832313, 'name': ReconfigVM_Task, 'duration_secs': 0.635364} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.730677] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7/120fa16e-60cd-4326-b6c4-f1df419dbcb7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.731513] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46a0b0e7-5c41-44af-842b-bbfa99e95315 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.740881] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 985.740881] env[62914]: value = "task-4832317" [ 985.740881] env[62914]: _type = "Task" [ 985.740881] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.752967] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832317, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.804714] env[62914]: DEBUG nova.objects.base [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 985.804980] env[62914]: DEBUG nova.network.neutron [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 985.828596] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832314, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.898681] env[62914]: DEBUG oslo_vmware.api [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832316, 'name': PowerOffVM_Task, 'duration_secs': 0.210851} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.902417] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 985.902666] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 985.903191] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832315, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.903493] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76050079-cc28-46c6-9a95-f0ac9da12127 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.932152] env[62914]: DEBUG nova.policy [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 986.004883] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 986.005259] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 986.005510] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Deleting the datastore file [datastore2] 12e8b0ac-0dec-4928-ae65-ab53992ecab5 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 986.006126] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ca9401b-5bcb-41c7-8c94-a3941918fd0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.009302] env[62914]: DEBUG nova.network.neutron [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updated VIF entry in instance network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 986.010026] env[62914]: DEBUG nova.network.neutron [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.021108] env[62914]: DEBUG oslo_vmware.api [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for the task: (returnval){ [ 986.021108] env[62914]: value = "task-4832319" [ 986.021108] env[62914]: _type = "Task" [ 986.021108] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.034069] env[62914]: DEBUG oslo_vmware.api [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.042830] env[62914]: DEBUG oslo_concurrency.lockutils [None req-63d5083f-bbad-468f-8e55-3a74c18fc89b tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "3e6a3787-3e9c-411c-9c3c-305a62061b47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.327s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.192461] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b7db43-0a96-e807-6df6-d7d29a0b90a4, 'name': SearchDatastore_Task, 'duration_secs': 0.067164} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.193825] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c0400e1-ae40-4e6d-ae91-578a7ddaf802 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.203244] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 986.203244] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f0b0ee-f0bc-66c0-1a93-65ef1dbeba9c" [ 986.203244] env[62914]: _type = "Task" [ 986.203244] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.213409] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f0b0ee-f0bc-66c0-1a93-65ef1dbeba9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.263020] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832317, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.330214] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.646163} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.334122] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 986.334576] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.335600] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9bb09eb4-c2a5-42ec-be3a-95f6199e6283 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.347218] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 986.347218] env[62914]: value = "task-4832320" [ 986.347218] env[62914]: _type = "Task" [ 986.347218] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.364485] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.400349] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.901819} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.403770] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 986.404114] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.404657] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c611f29d-f12d-447e-94e7-37af892da2c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.412777] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 986.412777] env[62914]: value = "task-4832321" [ 986.412777] env[62914]: _type = "Task" [ 986.412777] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.425978] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832321, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.446626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6871d564-c893-420b-ab13-4d2c0cf6b3d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.458960] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4140f50-309c-441b-9b96-3cd4e334e0e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.511743] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e9f7b7-79ba-456b-ba82-d322152430f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.515042] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ab03865-11f3-4e55-b6ef-8fbd7fd67d30 req-ac4996b9-1bdd-444f-a627-44ac9a46ed78 service nova] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.522306] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805266bc-3f97-4ffc-b749-eed311e8f877 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.540897] env[62914]: DEBUG nova.compute.provider_tree [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.547494] env[62914]: DEBUG oslo_vmware.api [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Task: {'id': task-4832319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.37518} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.551856] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.552065] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 986.552242] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 986.552425] env[62914]: INFO nova.compute.manager [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Took 1.20 seconds to destroy the instance on the hypervisor. [ 986.552680] env[62914]: DEBUG oslo.service.loopingcall [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.553518] env[62914]: DEBUG nova.compute.manager [-] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 986.553646] env[62914]: DEBUG nova.network.neutron [-] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 986.715663] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f0b0ee-f0bc-66c0-1a93-65ef1dbeba9c, 'name': SearchDatastore_Task, 'duration_secs': 0.053015} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.716074] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.716474] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 986.716849] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b10962a-6615-4840-8e5b-6c05515ac550 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.726795] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 986.726795] env[62914]: value = "task-4832322" [ 986.726795] env[62914]: _type = "Task" [ 986.726795] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.737469] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.752276] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832317, 'name': Rename_Task, 'duration_secs': 0.561083} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.752502] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 986.752930] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f905213d-e387-4f72-a757-217979debc59 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.761740] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 986.761740] env[62914]: value = "task-4832323" [ 986.761740] env[62914]: _type = "Task" [ 986.761740] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.773229] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832323, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.858175] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206027} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.858175] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.858175] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe10f82-88fc-4b5a-9e1d-4001ceb4e371 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.882587] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.884575] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57f351a0-ab24-4001-b008-2682bc499ca1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.910203] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 986.910203] env[62914]: value = "task-4832324" [ 986.910203] env[62914]: _type = "Task" [ 986.910203] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.925614] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832324, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.928840] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183362} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.929180] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.930036] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5fdf48-f94c-45dd-92f1-402ac9b76d02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.956387] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.956787] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e662cc9-e937-4c25-b4a4-a64e1568cdb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.980175] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 986.980175] env[62914]: value = "task-4832325" [ 986.980175] env[62914]: _type = "Task" [ 986.980175] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.991060] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832325, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.052876] env[62914]: DEBUG nova.scheduler.client.report [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.060418] env[62914]: DEBUG nova.compute.manager [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 987.242848] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832322, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.275244] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832323, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.403646] env[62914]: DEBUG nova.network.neutron [-] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.424559] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.457089] env[62914]: DEBUG nova.compute.manager [req-a05a123e-9b91-4543-9c0a-558b90f42af0 req-df3b770a-fdbc-4374-98bb-74b3ecadcf91 service nova] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Received event network-vif-deleted-38e304a6-f447-44af-8c07-955d9d6a842f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 987.494228] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832325, 'name': ReconfigVM_Task, 'duration_secs': 0.344292} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.495473] env[62914]: DEBUG nova.compute.manager [None req-9efddee7-e32a-4f6f-94da-990a31536fde tempest-ServerDiagnosticsV248Test-1136005245 tempest-ServerDiagnosticsV248Test-1136005245-project-admin] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 987.495805] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Reconfigured VM instance instance-0000005b to attach disk [datastore2] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.497356] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28692e38-835c-4fdc-a522-843cdae1f434 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.500831] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04763817-447a-4e3e-a8bd-1e6608056da7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.509725] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 987.509725] env[62914]: value = "task-4832326" [ 987.509725] env[62914]: _type = "Task" [ 987.509725] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.511806] env[62914]: INFO nova.compute.manager [None req-9efddee7-e32a-4f6f-94da-990a31536fde tempest-ServerDiagnosticsV248Test-1136005245 tempest-ServerDiagnosticsV248Test-1136005245-project-admin] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Retrieving diagnostics [ 987.516201] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7347bd6-1152-415e-b21b-6892fafe851b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.526282] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832326, 'name': Rename_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.559557] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.563141] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.085s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.564812] env[62914]: INFO nova.compute.claims [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.584224] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.587074] env[62914]: INFO nova.scheduler.client.report [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Deleted allocations for instance 13f2a615-aa95-411d-92f8-9ff1b6eba420 [ 987.739443] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781582} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.739767] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 987.740071] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.740401] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea1efba7-cde4-43e7-972b-3b7491f753f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.747796] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 987.747796] env[62914]: value = "task-4832327" [ 987.747796] env[62914]: _type = "Task" [ 987.747796] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.759435] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832327, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.775415] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832323, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.906286] env[62914]: INFO nova.compute.manager [-] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Took 1.35 seconds to deallocate network for instance. [ 987.922972] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832324, 'name': ReconfigVM_Task, 'duration_secs': 0.62233} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.923400] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Reconfigured VM instance instance-00000056 to attach disk [datastore2] dac99ed2-aed9-4c3e-bcab-a8de9967990c/dac99ed2-aed9-4c3e-bcab-a8de9967990c.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.924332] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84db8c3b-70e2-49c4-8c05-956471763c23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.931591] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 987.931591] env[62914]: value = "task-4832328" [ 987.931591] env[62914]: _type = "Task" [ 987.931591] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.942050] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832328, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.023326] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832326, 'name': Rename_Task, 'duration_secs': 0.1756} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.023737] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 988.024058] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0380824-c95d-40cc-ad7c-304513ed2e2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.031842] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 988.031842] env[62914]: value = "task-4832329" [ 988.031842] env[62914]: _type = "Task" [ 988.031842] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.041058] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832329, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.099602] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6fe6e708-cf44-4f3f-80d0-d655562f16c3 tempest-MultipleCreateTestJSON-1768884473 tempest-MultipleCreateTestJSON-1768884473-project-member] Lock "13f2a615-aa95-411d-92f8-9ff1b6eba420" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.605s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.104481] env[62914]: DEBUG nova.network.neutron [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Successfully updated port: b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.262015] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073739} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.262556] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.263676] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ff60e4-f65a-43a0-8336-fb8d3f06402e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.277334] env[62914]: DEBUG oslo_vmware.api [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832323, 'name': PowerOnVM_Task, 'duration_secs': 1.168221} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.288807] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 988.288869] env[62914]: INFO nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Took 9.71 seconds to spawn the instance on the hypervisor. 
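(Illustrative aside, not part of the captured log.) The records above trace the tail end of a VMware spawn: ReconfigVM_Task attaches the copied VMDK, Rename_Task gives the VM its instance name, and PowerOnVM_Task brings it up, with each task polled until the "completed successfully" entry appears. A minimal sketch of how such entries are typically produced with oslo.vmware is shown below; the host, credentials, and vm_ref are placeholder assumptions, and the VMwareAPISession argument names are quoted from memory, so check them against the installed library before relying on them.

```python
# Rough sketch only: reproduce the Rename_Task / PowerOnVM_Task / wait_for_task
# pattern visible in the surrounding log records. All connection details are
# placeholders, not values taken from this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',            # vCenter host (placeholder)
    'user', 'password',           # credentials (placeholders)
    api_retry_count=10,
    task_poll_interval=0.5)

def rename_and_power_on(vm_ref, new_name):
    """Mirror the logged sequence: Rename_Task, then PowerOnVM_Task."""
    # Each invoke_api() call against session.vim surfaces in the log as
    # "Invoking VirtualMachine.<method> with opID=oslo.vmware-...".
    rename_task = session.invoke_api(session.vim, 'Rename_Task',
                                     vm_ref, newName=new_name)
    # wait_for_task() is what emits the repeated
    # "Task: {...} progress is N%" / "completed successfully" records.
    session.wait_for_task(rename_task)

    power_on_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_on_task)
```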
[ 988.289085] env[62914]: DEBUG nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 988.299698] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.300532] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1242b7-5599-4322-bbc5-2bbf1e758839 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.303680] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04626ef1-1f62-47f2-a80b-7d06c019a56f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.327914] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 988.327914] env[62914]: value = "task-4832330" [ 988.327914] env[62914]: _type = "Task" [ 988.327914] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.338254] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832330, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.417407] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.442523] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832328, 'name': Rename_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.507212] env[62914]: DEBUG nova.compute.manager [req-47efbad7-8266-441a-a74c-36761a0b0554 req-dd86f259-e937-4fea-8e72-645148b1da2e service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-vif-plugged-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 988.507454] env[62914]: DEBUG oslo_concurrency.lockutils [req-47efbad7-8266-441a-a74c-36761a0b0554 req-dd86f259-e937-4fea-8e72-645148b1da2e service nova] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.507670] env[62914]: DEBUG oslo_concurrency.lockutils [req-47efbad7-8266-441a-a74c-36761a0b0554 req-dd86f259-e937-4fea-8e72-645148b1da2e service nova] Lock "aedc785f-619f-4b9f-850f-790f84e57577-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.507844] env[62914]: DEBUG oslo_concurrency.lockutils [req-47efbad7-8266-441a-a74c-36761a0b0554 req-dd86f259-e937-4fea-8e72-645148b1da2e service nova] Lock "aedc785f-619f-4b9f-850f-790f84e57577-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.508026] env[62914]: DEBUG nova.compute.manager [req-47efbad7-8266-441a-a74c-36761a0b0554 req-dd86f259-e937-4fea-8e72-645148b1da2e service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] No waiting events found dispatching network-vif-plugged-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 988.508204] env[62914]: WARNING nova.compute.manager [req-47efbad7-8266-441a-a74c-36761a0b0554 req-dd86f259-e937-4fea-8e72-645148b1da2e service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received unexpected event network-vif-plugged-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc for instance with vm_state active and task_state None. [ 988.545192] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832329, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.607441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.607652] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.607922] env[62914]: DEBUG nova.network.neutron [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 988.714761] env[62914]: DEBUG oslo_concurrency.lockutils [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.716032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.716032] env[62914]: INFO nova.compute.manager [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Rebooting instance [ 988.840382] env[62914]: INFO nova.compute.manager [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Took 44.73 seconds to build instance. [ 988.848290] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832330, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.943038] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832328, 'name': Rename_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.956804] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a97964e-6b4b-458f-b38c-664dd23b7cdb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.967554] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a81f15-aaf2-4ef7-922e-ba8e15f2588e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.003310] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e250f5-4b97-4cc4-9e9b-0330d2357170 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.012289] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947e02c2-b821-4512-a473-bdb5668c90ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.027316] env[62914]: DEBUG nova.compute.provider_tree [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.042855] env[62914]: DEBUG oslo_vmware.api [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832329, 'name': PowerOnVM_Task, 'duration_secs': 0.96341} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.043174] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 989.043386] env[62914]: INFO nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Took 5.80 seconds to spawn the instance on the hypervisor. 
[ 989.043579] env[62914]: DEBUG nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 989.044397] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a031592d-60dc-4adb-bcb6-38c6ff625da6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.174556] env[62914]: WARNING nova.network.neutron [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] 9be47f79-b984-4fc2-a590-a80f36132ab1 already exists in list: networks containing: ['9be47f79-b984-4fc2-a590-a80f36132ab1']. ignoring it [ 989.215520] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "b443050b-78ae-4f9d-81d4-508f5cf4a322" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.215859] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "b443050b-78ae-4f9d-81d4-508f5cf4a322" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.216103] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "b443050b-78ae-4f9d-81d4-508f5cf4a322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.216303] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "b443050b-78ae-4f9d-81d4-508f5cf4a322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.216512] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "b443050b-78ae-4f9d-81d4-508f5cf4a322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.221189] env[62914]: INFO nova.compute.manager [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: 
b443050b-78ae-4f9d-81d4-508f5cf4a322] Terminating instance [ 989.223338] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "refresh_cache-b443050b-78ae-4f9d-81d4-508f5cf4a322" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.223691] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquired lock "refresh_cache-b443050b-78ae-4f9d-81d4-508f5cf4a322" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.223939] env[62914]: DEBUG nova.network.neutron [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 989.240375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.240606] env[62914]: DEBUG oslo_concurrency.lockutils [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.240790] env[62914]: DEBUG nova.network.neutron [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 989.343091] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832330, 'name': ReconfigVM_Task, 'duration_secs': 0.664088} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.343336] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7/10102941-c31a-4ab1-be5a-801520d49fd7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.343975] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd7d151d-28b9-4a4b-a4f9-c9e0bbc9d209 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.350223] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9757ece4-a58a-495e-babb-d2bb865948d6 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.250s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.351993] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 989.351993] env[62914]: value = "task-4832331" [ 989.351993] env[62914]: _type = "Task" [ 989.351993] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.360851] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832331, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.445440] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832328, 'name': Rename_Task, 'duration_secs': 1.26979} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.445792] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 989.446500] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-935affb1-93da-42f5-a61e-ddd62179ac23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.452969] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 989.452969] env[62914]: value = "task-4832332" [ 989.452969] env[62914]: _type = "Task" [ 989.452969] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.461505] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.531030] env[62914]: DEBUG nova.scheduler.client.report [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.565443] env[62914]: INFO nova.compute.manager [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Took 40.13 seconds to build instance. 
[ 989.635247] env[62914]: DEBUG nova.network.neutron [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "address": "fa:16:3e:a6:45:00", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e109fe-6c", "ovs_interfaceid": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.747029] env[62914]: DEBUG nova.network.neutron [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 989.859463] env[62914]: DEBUG nova.network.neutron [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.864550] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832331, 'name': Rename_Task, 'duration_secs': 0.380109} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.864984] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 989.865262] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efb6ee37-7f5d-4d0c-8121-5b973f37c549 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.873670] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 989.873670] env[62914]: value = "task-4832333" [ 989.873670] env[62914]: _type = "Task" [ 989.873670] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.883142] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832333, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.965189] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832332, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.036851] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.037578] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 990.044708] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.355s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.046777] env[62914]: INFO nova.compute.claims [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.069022] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fad60dfb-3880-42db-8e50-455ba6a4c0d6 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.202s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.138460] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.139178] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.139414] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.140252] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1be4a3-bcc6-4ee6-895b-5d9f00b0912e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.163009] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 990.163300] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.163476] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.163670] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.163822] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.163975] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.164213] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.164393] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 990.164715] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.164831] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.165135] env[62914]: DEBUG nova.virt.hardware [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.173892] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d3a86758-c552-45c2-9147-add321dea221 
tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Reconfiguring VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 990.173892] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9090d2e-6e2c-4e9b-840d-ed4ecea9f174 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.195831] env[62914]: DEBUG oslo_vmware.api [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 990.195831] env[62914]: value = "task-4832334" [ 990.195831] env[62914]: _type = "Task" [ 990.195831] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.208969] env[62914]: DEBUG oslo_vmware.api [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832334, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.246793] env[62914]: DEBUG nova.network.neutron [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.366527] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Releasing lock "refresh_cache-b443050b-78ae-4f9d-81d4-508f5cf4a322" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.367108] env[62914]: DEBUG nova.compute.manager [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 
tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 990.367356] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 990.368509] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb3e6eb-660b-4b45-96aa-4c81ad5f981f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.380537] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 990.381330] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c6b41b9-917e-4975-94e6-68918b5a7346 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.389567] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832333, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.391854] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 990.391854] env[62914]: value = "task-4832335" [ 990.391854] env[62914]: _type = "Task" [ 990.391854] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.403647] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832335, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.464234] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832332, 'name': PowerOnVM_Task, 'duration_secs': 0.838407} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.464552] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 990.464765] env[62914]: DEBUG nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 990.465600] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75938f64-79d0-4449-8893-af72bac7a529 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.553838] env[62914]: DEBUG nova.compute.utils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.557440] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 990.557440] env[62914]: DEBUG nova.network.neutron [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 990.607830] env[62914]: DEBUG nova.policy [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ea29d6698d4734a5def35fe065fe21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b59bf6daf8c246f7b034dc0adcfc8cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 990.709993] env[62914]: DEBUG oslo_vmware.api [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832334, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.750744] env[62914]: DEBUG oslo_concurrency.lockutils [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.755196] env[62914]: DEBUG nova.compute.manager [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 990.755196] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bd1f0c-ad3a-4563-9288-fe508f188440 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.888911] env[62914]: DEBUG oslo_vmware.api [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832333, 'name': PowerOnVM_Task, 'duration_secs': 0.744278} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.888911] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 990.889191] env[62914]: DEBUG nova.compute.manager [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 990.890802] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22c25ff-a594-42da-b69e-cfe24c22a042 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.907030] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832335, 'name': PowerOffVM_Task, 'duration_secs': 0.221269} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.908857] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 990.909136] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 990.912081] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2deb4a8-9e80-45b6-9c25-f906c34b399b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.947033] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 990.947033] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 990.947033] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Deleting the datastore file [datastore1] b443050b-78ae-4f9d-81d4-508f5cf4a322 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.947033] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e934a4a5-298d-44f1-b6a4-f68d692ef1b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.951622] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for the task: (returnval){ [ 990.951622] env[62914]: value = "task-4832337" [ 990.951622] env[62914]: _type = "Task" [ 990.951622] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.965959] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.979552] env[62914]: INFO nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] bringing vm to original state: 'stopped' [ 991.012594] env[62914]: DEBUG nova.network.neutron [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Successfully created port: 91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.069183] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 991.209104] env[62914]: DEBUG oslo_vmware.api [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.427416] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.466245] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.588197] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41725ef-cf8f-4eb8-9c19-8c3defee96de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.599273] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410ec35f-36f7-4156-a281-4cb4d70a3a74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.635434] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c60e82c-3aa9-4b0d-9ff2-36c4e4fb13c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.648075] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64dd877-8954-4bac-83cb-1addbac00e16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.672297] env[62914]: DEBUG nova.compute.provider_tree [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.714816] env[62914]: DEBUG oslo_vmware.api [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.782530] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bec2834-da77-4c8b-ab8d-ff5350ae571a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.794337] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Doing hard reboot of VM {{(pid=62914) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 991.794337] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-03d0aee8-77d4-4272-827a-7d25c5b2454c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.802589] env[62914]: DEBUG oslo_vmware.api [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 991.802589] env[62914]: value = "task-4832338" [ 991.802589] env[62914]: _type = "Task" [ 991.802589] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.817106] env[62914]: DEBUG oslo_vmware.api [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832338, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.875240] env[62914]: DEBUG nova.compute.manager [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-changed-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 991.875643] env[62914]: DEBUG nova.compute.manager [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing instance network info cache due to event network-changed-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 991.876215] env[62914]: DEBUG oslo_concurrency.lockutils [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.876446] env[62914]: DEBUG oslo_concurrency.lockutils [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.876660] env[62914]: DEBUG nova.network.neutron [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing network info cache for port b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 991.964860] env[62914]: DEBUG oslo_vmware.api [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Task: {'id': task-4832337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528266} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.965548] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.965548] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 991.965720] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.965941] env[62914]: INFO nova.compute.manager [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Took 1.60 seconds to destroy the instance on the hypervisor. [ 991.966459] env[62914]: DEBUG oslo.service.loopingcall [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.968022] env[62914]: DEBUG nova.compute.manager [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 991.968022] env[62914]: DEBUG nova.network.neutron [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.987352] env[62914]: DEBUG nova.network.neutron [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 991.995777] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.996057] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.996253] env[62914]: DEBUG nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 991.997814] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bd0cb4-3f0e-4600-9af2-b2398e327764 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.006665] env[62914]: DEBUG nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 992.009210] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 992.009802] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ab315c8-0119-417b-8800-fb450407cdf2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.018845] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 992.018845] env[62914]: value = "task-4832339" [ 992.018845] env[62914]: _type = "Task" [ 992.018845] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.030828] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832339, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.087217] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 992.120732] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.121026] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.121226] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.121422] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.121585] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.121745] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.121972] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 992.122161] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.122343] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.122521] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.122770] env[62914]: DEBUG nova.virt.hardware [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.123698] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb871da-5b73-4aa6-89ed-3a7d5c83ef5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.134453] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ef8ee0-45c8-4df7-96ca-4e12f1d909c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.176686] env[62914]: DEBUG nova.scheduler.client.report [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 992.211014] env[62914]: DEBUG oslo_vmware.api [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832334, 'name': ReconfigVM_Task, 'duration_secs': 1.914937} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.211831] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.212091] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Reconfigured VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 992.313669] env[62914]: DEBUG oslo_vmware.api [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832338, 'name': ResetVM_Task, 'duration_secs': 0.119823} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.314088] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Did hard reboot of VM {{(pid=62914) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 992.314330] env[62914]: DEBUG nova.compute.manager [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 992.315183] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04e7bc6-8d38-4fe5-90c4-0b30879240db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.403813] env[62914]: DEBUG nova.compute.manager [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 992.489959] env[62914]: DEBUG nova.network.neutron [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.532337] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832339, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.683069] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.683069] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 992.685887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.561s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.688820] env[62914]: INFO nova.compute.claims [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.719383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3a86758-c552-45c2-9147-add321dea221 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-aedc785f-619f-4b9f-850f-790f84e57577-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.056s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.748931] env[62914]: DEBUG nova.network.neutron [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updated VIF entry in instance network info cache for port b3e109fe-6c2f-407e-97fd-39b74b3bc4bc. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 992.749582] env[62914]: DEBUG nova.network.neutron [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "address": "fa:16:3e:a6:45:00", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e109fe-6c", "ovs_interfaceid": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.755612] env[62914]: DEBUG nova.network.neutron [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Successfully updated port: 91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.829784] env[62914]: DEBUG oslo_concurrency.lockutils [None req-862b14de-e354-4607-aae9-f227002a8552 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" "released" by 
"nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.115s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.932812] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.993269] env[62914]: INFO nova.compute.manager [-] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Took 1.03 seconds to deallocate network for instance. [ 993.033513] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.193445] env[62914]: DEBUG nova.compute.utils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 993.195159] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 993.195350] env[62914]: DEBUG nova.network.neutron [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 993.254261] env[62914]: DEBUG nova.policy [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddc9958565c745e488dc7f3b34af9585', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4860bec4a28e4289b7a508f007fff452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 993.256378] env[62914]: DEBUG oslo_concurrency.lockutils [req-1c6a34a7-9dc0-4e21-91c9-c7e08fc6f0ba req-1c4259b0-a95f-4f39-b800-744d836d1e4d service nova] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.264780] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.264988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.265178] env[62914]: DEBUG nova.network.neutron [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.502069] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.538244] env[62914]: DEBUG oslo_vmware.api [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832339, 'name': PowerOffVM_Task, 'duration_secs': 1.279508} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.538244] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 993.538244] env[62914]: DEBUG nova.compute.manager [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 993.538244] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf95829a-c518-4213-9cfb-cfe130d3721d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.561185] env[62914]: DEBUG nova.compute.manager [req-811e124e-5de4-4425-a13a-2226c080f109 req-d2c61c65-20fe-442f-b21f-0ff409b7ee5d service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Received event network-vif-plugged-91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 993.561185] env[62914]: DEBUG oslo_concurrency.lockutils [req-811e124e-5de4-4425-a13a-2226c080f109 req-d2c61c65-20fe-442f-b21f-0ff409b7ee5d service nova] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.561185] env[62914]: DEBUG oslo_concurrency.lockutils [req-811e124e-5de4-4425-a13a-2226c080f109 req-d2c61c65-20fe-442f-b21f-0ff409b7ee5d service nova] Lock 
"06e8b438-01ef-481f-8e27-2faa01bb97aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.561185] env[62914]: DEBUG oslo_concurrency.lockutils [req-811e124e-5de4-4425-a13a-2226c080f109 req-d2c61c65-20fe-442f-b21f-0ff409b7ee5d service nova] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.561185] env[62914]: DEBUG nova.compute.manager [req-811e124e-5de4-4425-a13a-2226c080f109 req-d2c61c65-20fe-442f-b21f-0ff409b7ee5d service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] No waiting events found dispatching network-vif-plugged-91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 993.561185] env[62914]: WARNING nova.compute.manager [req-811e124e-5de4-4425-a13a-2226c080f109 req-d2c61c65-20fe-442f-b21f-0ff409b7ee5d service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Received unexpected event network-vif-plugged-91711c66-4bec-40d9-b1be-9603bbad7e46 for instance with vm_state building and task_state spawning. [ 993.672043] env[62914]: DEBUG nova.network.neutron [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Successfully created port: c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.699397] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 993.843768] env[62914]: DEBUG nova.network.neutron [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.067899] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.107198] env[62914]: DEBUG nova.network.neutron [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updating instance_info_cache with network_info: [{"id": "91711c66-4bec-40d9-b1be-9603bbad7e46", "address": "fa:16:3e:97:22:3f", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91711c66-4b", "ovs_interfaceid": "91711c66-4bec-40d9-b1be-9603bbad7e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.241290] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886f0c3f-c4f2-4190-abb7-47bd46756026 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.258672] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7326324e-1d9f-47dc-ba1e-cbd0b540e468 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.295433] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c66cbf-bc00-4b9d-ae5c-2e05542a8b80 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.304812] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b4f334-c59e-46b7-991e-f745181edcb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.326049] env[62914]: DEBUG nova.compute.provider_tree [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.580251] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.612688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.612688] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Instance network_info: |[{"id": "91711c66-4bec-40d9-b1be-9603bbad7e46", "address": "fa:16:3e:97:22:3f", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91711c66-4b", "ovs_interfaceid": "91711c66-4bec-40d9-b1be-9603bbad7e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 994.612688] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:22:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91711c66-4bec-40d9-b1be-9603bbad7e46', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.623272] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Creating folder: Project (b59bf6daf8c246f7b034dc0adcfc8cde). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 994.623808] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6895cb7-f3f5-44b2-9448-c4c29767076f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.646522] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Created folder: Project (b59bf6daf8c246f7b034dc0adcfc8cde) in parent group-v941773. [ 994.646522] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Creating folder: Instances. Parent ref: group-v942023. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 994.646805] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5788a5aa-1d1a-4cdd-bdc9-1b7fd0f4462f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.662332] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Created folder: Instances in parent group-v942023. [ 994.662332] env[62914]: DEBUG oslo.service.loopingcall [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.662332] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 994.662332] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8bba5e2-000c-43ab-8d9f-03e1db726e5a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.691178] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.691178] env[62914]: value = "task-4832342" [ 994.691178] env[62914]: _type = "Task" [ 994.691178] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.706612] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832342, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.721904] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 994.757541] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.757739] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.757833] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.758282] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.758487] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.758951] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.759093] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.759406] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.759668] env[62914]: DEBUG nova.virt.hardware [None 
req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.760310] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.760569] env[62914]: DEBUG nova.virt.hardware [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.761670] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3075156-e05d-4f5b-a2bf-967b7b27cbc6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.772413] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33879b1b-316a-4b3d-8c84-7f0915d79a61 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.829740] env[62914]: DEBUG nova.scheduler.client.report [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 995.140404] env[62914]: INFO nova.compute.manager [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Rebuilding instance [ 995.203439] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832342, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.212610] env[62914]: DEBUG nova.compute.manager [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 995.212610] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04af759c-94ff-4c09-bbd9-d27b64e125c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.335916] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.336696] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 995.343220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.541s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.343220] env[62914]: DEBUG nova.objects.instance [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lazy-loading 'resources' on Instance uuid b77a3d27-fe9f-49fc-95d1-15fe82762833 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.707076] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832342, 'name': CreateVM_Task, 'duration_secs': 0.821546} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.707381] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 995.708125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.708366] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.708717] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.709171] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97a93d1b-189f-4a84-91e9-cfd08b61f201 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.716159] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 995.716159] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e57e1-5bdb-b6f9-a372-776dc6d0d85a" [ 995.716159] env[62914]: _type = "Task" [ 995.716159] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.727960] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 995.732025] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e57e1-5bdb-b6f9-a372-776dc6d0d85a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.732025] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ff7c63c-a19e-42db-974e-c256663c2e51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.743037] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 995.743037] env[62914]: value = "task-4832343" [ 995.743037] env[62914]: _type = "Task" [ 995.743037] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.754138] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.791179] env[62914]: DEBUG nova.network.neutron [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Successfully updated port: c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.813563] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-aedc785f-619f-4b9f-850f-790f84e57577-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.814099] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-aedc785f-619f-4b9f-850f-790f84e57577-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.847626] env[62914]: DEBUG nova.compute.utils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 995.852773] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 995.853044] env[62914]: DEBUG nova.network.neutron [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 995.920822] env[62914]: DEBUG nova.policy [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5da18e2dc49746d8a7125efdc106d62b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd271710592bf47b79e16552221fe7107', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 996.013852] env[62914]: DEBUG nova.compute.manager [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Received event network-changed-91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 996.014130] env[62914]: DEBUG nova.compute.manager [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Refreshing instance network info cache due to event network-changed-91711c66-4bec-40d9-b1be-9603bbad7e46. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 996.014433] env[62914]: DEBUG oslo_concurrency.lockutils [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] Acquiring lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.014608] env[62914]: DEBUG oslo_concurrency.lockutils [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] Acquired lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.014776] env[62914]: DEBUG nova.network.neutron [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Refreshing network info cache for port 91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 996.238882] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523e57e1-5bdb-b6f9-a372-776dc6d0d85a, 'name': SearchDatastore_Task, 'duration_secs': 0.045823} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.239155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.239418] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.239670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.239813] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.241341] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.241731] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f330f12c-0c99-4c7e-be29-4ace204601c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.260448] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832343, 'name': PowerOffVM_Task, 'duration_secs': 0.208718} completed successfully. 
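[editor's note] The PowerOffVM_Task and SearchDatastore_Task entries above follow the same poll-until-done pattern: a task handle is returned, progress is reported ("progress is 0%") until the task "completed successfully". A minimal sketch of that pattern, not the oslo.vmware implementation; `get_task_state` and its return shape are assumptions:

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls, roughly the cadence visible in the log

def wait_for_task(get_task_state, task_id, timeout=300):
    """Poll a task until it succeeds or fails.

    `get_task_state(task_id)` is a hypothetical callable returning a dict like
    {'state': 'running'|'success'|'error', 'progress': 42, 'error': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_state(task_id)
        if info['state'] == 'success':
            return info            # corresponds to "completed successfully"
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Entries such as "progress is 0%" come from this branch.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```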
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.261067] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 996.262152] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 996.263353] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.266360] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 996.267880] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815e9a2c-b6ee-4b0b-a938-c86b4e2d71e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.270765] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11a04de2-2c2d-4596-bc74-8f87a1910eef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.278586] env[62914]: DEBUG nova.network.neutron [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Successfully created port: 1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.287029] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 996.287029] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c2ea7f-19d1-1cc7-eb9b-edcb74e42b2d" [ 996.287029] env[62914]: _type = "Task" [ 996.287029] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.288976] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 996.296556] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8719c694-aed4-4797-9c28-4558dc265890 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.299739] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.299739] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.299739] env[62914]: DEBUG nova.network.neutron [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 996.308019] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c2ea7f-19d1-1cc7-eb9b-edcb74e42b2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.312376] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187cb585-d7d1-433b-bbcb-630e3866e624 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.323564] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.323564] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.325521] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb6b4fd-4aaf-4b1e-aeac-e44b29beecf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.328272] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a343d84-406f-4493-957c-ad47ed40e478 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.334666] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 996.335000] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 996.335147] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Deleting the datastore file [datastore2] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.336097] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-583cdf34-9abe-4e72-a1ef-a3032ad7fe78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.380398] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 996.385189] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfc44a4-4893-4d22-b624-573f619fc8e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.389647] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc9d43f-fe9b-4972-8465-1152c0c56a94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.394431] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 996.394431] env[62914]: value = "task-4832345" [ 996.394431] env[62914]: _type = "Task" [ 996.394431] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.424617] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Reconfiguring VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 996.426404] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b18af21a-a9ed-4371-a3fd-f0d811284444 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.441331] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9685a62d-38bc-4926-ac69-524bb82d7b8c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.449655] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832345, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.462884] env[62914]: DEBUG nova.compute.provider_tree [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.466284] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 996.466284] env[62914]: value = "task-4832346" [ 996.466284] env[62914]: _type = "Task" [ 996.466284] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.477228] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.492488] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.492712] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.492976] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.493647] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.493847] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.496696] env[62914]: INFO nova.compute.manager [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Terminating instance [ 996.500095] env[62914]: DEBUG nova.compute.manager [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Start destroying the instance on the hypervisor. 
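[editor's note] The teardown of dcf5a6d6 above, and of dac99ed2 just below, follows the same order: power off the VM, unregister it, then delete its datastore directory. A sketch of that sequence only; `session.run_task` and `session.call` are assumed helpers, not the real vmwareapi driver API:

```python
def destroy_instance(session, vm_ref, datastore_path):
    """Illustrative only: mirrors the order seen in the log
    (PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task)."""
    # 1. Power off the VM and wait for the task to finish.
    session.run_task("PowerOffVM_Task", vm_ref)
    # 2. Unregister the VM from the vCenter inventory (plain call, no task).
    session.call("UnregisterVM", vm_ref)
    # 3. Delete the instance directory, e.g. "[datastore2] dcf5a6d6-...".
    session.run_task("DeleteDatastoreFile_Task", datastore_path)
```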
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 996.500095] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 996.500507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4a8432-70c6-47b2-9420-581975353215 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.510175] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 996.510553] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12f74fbc-4dc3-400e-825e-762fb80d8400 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.600505] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 996.600505] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 996.600505] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore2] dac99ed2-aed9-4c3e-bcab-a8de9967990c {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.600505] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9956e05e-f16a-4b5f-b618-389ddec2e0fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.612608] env[62914]: DEBUG oslo_vmware.api [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 996.612608] env[62914]: value = "task-4832348" [ 996.612608] env[62914]: _type = "Task" [ 996.612608] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.628349] env[62914]: DEBUG oslo_vmware.api [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832348, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.800808] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c2ea7f-19d1-1cc7-eb9b-edcb74e42b2d, 'name': SearchDatastore_Task, 'duration_secs': 0.028278} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.801751] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d526dd6-3703-47ec-b45f-2cce83cd2dbd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.813127] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 996.813127] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fbba8e-3b58-bc11-82be-3c5e1364bbb9" [ 996.813127] env[62914]: _type = "Task" [ 996.813127] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.821877] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fbba8e-3b58-bc11-82be-3c5e1364bbb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.845116] env[62914]: DEBUG nova.network.neutron [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 996.909378] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200017} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.911312] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.911312] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 996.911312] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 996.967814] env[62914]: DEBUG nova.scheduler.client.report [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 996.988833] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.042075] env[62914]: DEBUG nova.network.neutron [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updated VIF entry in instance network info cache for port 91711c66-4bec-40d9-b1be-9603bbad7e46. 
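[editor's note] For the inventory reported above by the scheduler report client, Placement's usable capacity per resource class is conventionally (total - reserved) * allocation_ratio; a quick check of those numbers, assuming that standard rule:

```python
# Inventory values copied from the log line above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200. Note that a single allocation is
# still capped by max_unit (e.g. 95 GB for DISK_GB in the logged inventory).
```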
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 997.042404] env[62914]: DEBUG nova.network.neutron [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updating instance_info_cache with network_info: [{"id": "91711c66-4bec-40d9-b1be-9603bbad7e46", "address": "fa:16:3e:97:22:3f", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91711c66-4b", "ovs_interfaceid": "91711c66-4bec-40d9-b1be-9603bbad7e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.058446] env[62914]: DEBUG nova.network.neutron [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updating instance_info_cache with network_info: [{"id": "c64b5774-e946-4217-a170-f93b64d5070b", "address": "fa:16:3e:c6:f5:e1", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64b5774-e9", "ovs_interfaceid": "c64b5774-e946-4217-a170-f93b64d5070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.126040] env[62914]: DEBUG oslo_vmware.api [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312392} completed successfully. 
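[editor's note] The instance_info_cache entries above carry the full per-VIF structure (port id, MAC, subnets, fixed IPs, devname). A small helper, illustrative rather than Nova's NetworkInfo model, that pulls the interesting fields out of an entry shaped like the cached data:

```python
# Trimmed copy of one VIF entry from the cache dump above.
network_info = [{
    "id": "c64b5774-e946-4217-a170-f93b64d5070b",
    "address": "fa:16:3e:c6:f5:e1",
    "network": {
        "label": "tempest-DeleteServersTestJSON-894185026-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3", "type": "fixed"}],
        }],
    },
    "devname": "tapc64b5774-e9",
}]

def summarize(vifs):
    """Yield (devname, mac, fixed_ips) for each VIF entry."""
    for vif in vifs:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"] if ip["type"] == "fixed"]
        yield vif["devname"], vif["address"], ips

print(list(summarize(network_info)))
# [('tapc64b5774-e9', 'fa:16:3e:c6:f5:e1', ['192.168.128.3'])]
```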
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.126040] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.126040] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 997.126040] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 997.126040] env[62914]: INFO nova.compute.manager [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Took 0.63 seconds to destroy the instance on the hypervisor. [ 997.126040] env[62914]: DEBUG oslo.service.loopingcall [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.126040] env[62914]: DEBUG nova.compute.manager [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 997.126040] env[62914]: DEBUG nova.network.neutron [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 997.327842] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fbba8e-3b58-bc11-82be-3c5e1364bbb9, 'name': SearchDatastore_Task, 'duration_secs': 0.017885} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.327842] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.327842] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 06e8b438-01ef-481f-8e27-2faa01bb97aa/06e8b438-01ef-481f-8e27-2faa01bb97aa.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 997.330240] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1fabc6e-4137-4251-8be6-246d462cc265 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.337984] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 997.337984] env[62914]: value = "task-4832349" [ 997.337984] env[62914]: _type = "Task" [ 997.337984] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.350321] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832349, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.391099] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Start spawning the instance on the hypervisor. 
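[editor's note] The CopyVirtualDisk_Task above copies the cached base image into the instance's own directory on the same datastore. A sketch of the path convention visible in that entry; the helper names are hypothetical:

```python
def cached_image_path(datastore, image_id):
    # Cached base images live under devstack-image-cache_base/<image_id>/.
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    # The per-instance copy lands under <instance_uuid>/.
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cached_image_path("datastore1", "75c43660-b52b-450e-ba36-0f721e14bc6c")
dst = instance_disk_path("datastore1", "06e8b438-01ef-481f-8e27-2faa01bb97aa")
print(src, "->", dst)
```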
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 997.435981] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.436642] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.436642] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.436845] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Got 1 possible 
topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.438185] env[62914]: DEBUG nova.virt.hardware [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.439103] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a748f75-b38c-41e6-940b-933d6bd446cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.449019] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91671c93-cbdc-4b6f-8026-043422fcea13 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.471727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "d9476d24-fbc5-4e30-bf67-85c388e943fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.472148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.472504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "d9476d24-fbc5-4e30-bf67-85c388e943fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.472793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.473071] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.476653] env[62914]: INFO nova.compute.manager [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Terminating instance [ 997.478650] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.482187] env[62914]: DEBUG nova.compute.manager [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 997.482353] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 997.490469] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.768s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.490469] env[62914]: DEBUG nova.objects.instance [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid af541b15-19ce-415a-b03e-cb605b780247 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.490469] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305d2bb3-d459-4930-a8d0-9de9a6e517e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.503818] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. 
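[editor's note] The lockutils lines above report how long each caller waited for a named lock and how long it was held (e.g. "compute_resources" waited 37.768s, held 2.137s). A sketch of that "waited/held" bookkeeping using a plain threading.Lock; this is not oslo.concurrency, just the idea:

```python
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    """Acquire a named in-process lock and report wait/hold durations."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" released :: held {held:.3f}s')

# Example use:
# with timed_lock("compute_resources"):
#     ...update resource tracker usage...
```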
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.504122] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 997.505033] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffc89855-692a-4d68-b944-c1163811f832 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.513194] env[62914]: INFO nova.scheduler.client.report [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted allocations for instance b77a3d27-fe9f-49fc-95d1-15fe82762833 [ 997.515826] env[62914]: DEBUG oslo_vmware.api [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 997.515826] env[62914]: value = "task-4832350" [ 997.515826] env[62914]: _type = "Task" [ 997.515826] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.532982] env[62914]: DEBUG oslo_vmware.api [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.546033] env[62914]: DEBUG oslo_concurrency.lockutils [req-3a0ee40d-6378-47c3-8491-c23fc8b8a6b7 req-abaee9e7-b6be-42b6-aa20-ec0936e0292a service nova] Releasing lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.561595] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.561595] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Instance network_info: |[{"id": "c64b5774-e946-4217-a170-f93b64d5070b", "address": "fa:16:3e:c6:f5:e1", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64b5774-e9", "ovs_interfaceid": "c64b5774-e946-4217-a170-f93b64d5070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 997.563119] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:f5:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c64b5774-e946-4217-a170-f93b64d5070b', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.571031] env[62914]: DEBUG oslo.service.loopingcall [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.571205] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 997.571502] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a6f7deb-066c-411a-bd0c-94e5dba7c0ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.596177] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.596177] env[62914]: value = "task-4832351" [ 997.596177] env[62914]: _type = "Task" [ 997.596177] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.606767] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832351, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.612380] env[62914]: DEBUG nova.compute.manager [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 997.613409] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d974210-c6d2-4a37-9d37-5de5e22b0854 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.820937] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.821300] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.858548] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832349, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.955672] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.956829] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.956829] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.956829] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.956829] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.956829] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.957448] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.957448] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.957448] env[62914]: DEBUG 
nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.957667] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.957891] env[62914]: DEBUG nova.virt.hardware [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.959303] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba466d15-6645-4c53-9d9b-7c69a48b4d26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.969015] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71328756-3cc3-4add-a30e-999d5d5b01f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.987909] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.994391] env[62914]: DEBUG oslo.service.loopingcall [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.001090] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 998.001742] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd6ee300-f0fa-44ce-86ef-1a2f484d5cf1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.015404] env[62914]: DEBUG nova.network.neutron [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.027980] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. 
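[editor's note] The hardware lines above enumerate candidate CPU topologies for a 1-vCPU flavor and end up with the single VirtCPUTopology(cores=1,sockets=1,threads=1). An illustrative enumeration of (sockets, cores, threads) triples whose product equals the vCPU count, bounded by the 65536 limits logged above; this is not Nova's implementation, just the idea:

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return all (sockets, cores, threads) triples with sockets*cores*threads == vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
```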
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.028556] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d1f53c5e-ef95-4491-a7cd-1279aaf21bb0 tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "b77a3d27-fe9f-49fc-95d1-15fe82762833" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.220s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.034410] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.034410] env[62914]: value = "task-4832352" [ 998.034410] env[62914]: _type = "Task" [ 998.034410] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.046206] env[62914]: DEBUG oslo_vmware.api [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832350, 'name': PowerOffVM_Task, 'duration_secs': 0.303121} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.047180] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 998.047540] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 998.049553] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7df5f450-9339-4079-ba25-dc9bf68e8756 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.056431] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832352, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.063738] env[62914]: DEBUG nova.network.neutron [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Successfully updated port: 1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 998.118938] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832351, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.126555] env[62914]: INFO nova.compute.manager [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] instance snapshotting [ 998.130214] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbab1d04-6e02-4f8c-b812-66ea5065652e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.137740] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 998.139096] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 998.139594] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleting the datastore file [datastore1] d9476d24-fbc5-4e30-bf67-85c388e943fd {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.156779] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa655318-166e-4d7b-b23a-e284a8b10684 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.162517] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5742f971-6c1f-4ea2-bea8-3d7b0b4468be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.170426] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Received event network-vif-plugged-c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 998.170426] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Acquiring lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.170426] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.170731] env[62914]: DEBUG oslo_concurrency.lockutils 
[req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.170991] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] No waiting events found dispatching network-vif-plugged-c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 998.171272] env[62914]: WARNING nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Received unexpected event network-vif-plugged-c64b5774-e946-4217-a170-f93b64d5070b for instance with vm_state building and task_state spawning. [ 998.171517] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Received event network-changed-c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 998.172039] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Refreshing instance network info cache due to event network-changed-c64b5774-e946-4217-a170-f93b64d5070b. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 998.172156] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Acquiring lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.172412] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Acquired lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.174542] env[62914]: DEBUG nova.network.neutron [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Refreshing network info cache for port c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 998.177191] env[62914]: DEBUG oslo_vmware.api [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 998.177191] env[62914]: value = "task-4832354" [ 998.177191] env[62914]: _type = "Task" [ 998.177191] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.200836] env[62914]: DEBUG oslo_vmware.api [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832354, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.324300] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 998.357549] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832349, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773456} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.357549] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 06e8b438-01ef-481f-8e27-2faa01bb97aa/06e8b438-01ef-481f-8e27-2faa01bb97aa.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 998.357549] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.357549] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-474ad2bc-dbf3-4e70-8f49-c6fe9ee4510a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.366069] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 998.366069] env[62914]: value = "task-4832355" [ 998.366069] env[62914]: _type = "Task" [ 998.366069] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.379133] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832355, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.502165] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.530633] env[62914]: INFO nova.compute.manager [-] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Took 1.41 seconds to deallocate network for instance. 
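The entries above and below show the oslo.vmware task-polling pattern that dominates this trace: a vCenter call such as CreateVM_Task, ExtendVirtualDisk_Task or DeleteDatastoreFile_Task returns a task handle, wait_for_task blocks on it, and _poll_task emits the repeated "Task: {...} progress is N%" lines until the task "completed successfully". The sketch below is a minimal, self-contained illustration of that loop only; TaskInfo, poll_task_info and the 0.5 s interval are assumptions made for the example and are not the actual oslo.vmware implementation or API.

```python
import time
from dataclasses import dataclass


# Hypothetical snapshot of a vCenter-style task; the real driver reads
# TaskInfo objects back from the vSphere PropertyCollector instead.
@dataclass
class TaskInfo:
    state: str            # "queued" | "running" | "success" | "error"
    progress: int         # 0-100, as logged ("progress is 14%")
    error: str | None = None


def wait_for_task(poll_task_info, interval=0.5, log=print):
    """Poll a task until it finishes (illustrative sketch only).

    poll_task_info: assumed callable returning the current TaskInfo.
    Mirrors the log pattern: report progress on every poll, return the
    final TaskInfo on success, raise on error.
    """
    while True:
        info = poll_task_info()
        if info.state in ("queued", "running"):
            log(f"Task progress is {info.progress}%.")
        elif info.state == "success":
            log("Task completed successfully.")
            return info
        else:
            raise RuntimeError(f"Task failed: {info.error}")
        time.sleep(interval)


# Usage example: a fake task that finishes after three polls.
if __name__ == "__main__":
    states = iter([
        TaskInfo("running", 0),
        TaskInfo("running", 25),
        TaskInfo("success", 100),
    ])
    wait_for_task(lambda: next(states), interval=0)
```

The real session code layers retries, session re-login and error translation on top of this loop; the sketch only captures the polling behaviour that produces the progress lines seen in this log.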
[ 998.547724] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832352, 'name': CreateVM_Task, 'duration_secs': 0.509323} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.547835] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 998.548338] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.548639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.549038] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 998.549288] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a38b785-48d6-4d8f-bf8e-16ea70eb2aa8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.556042] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 998.556042] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c662fc-cfac-0c7d-d73f-5694c6ef6ee6" [ 998.556042] env[62914]: _type = "Task" [ 998.556042] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.564897] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "refresh_cache-79c7728a-0452-44ec-91de-62e3f09f9183" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.565344] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "refresh_cache-79c7728a-0452-44ec-91de-62e3f09f9183" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.565344] env[62914]: DEBUG nova.network.neutron [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 998.572133] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c662fc-cfac-0c7d-d73f-5694c6ef6ee6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.586390] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37da230f-b6cb-4534-b1bc-e3af7ef746bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.598579] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5010293-0fe2-4581-a6e5-7963c3388bef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.614785] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832351, 'name': CreateVM_Task, 'duration_secs': 0.859404} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.640238] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 998.641710] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.642528] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2b992c-fc8c-43d0-b7ec-cd440e712408 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.651525] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe7ba20-249c-4a83-b87e-3b647d73738e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.669531] env[62914]: DEBUG nova.compute.provider_tree [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.693321] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 998.693940] env[62914]: DEBUG oslo_vmware.api [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402013} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.694080] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-61b2bf35-b0be-4ed1-b30d-46ea1f1712fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.696379] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.696577] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 998.696777] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 998.696959] env[62914]: INFO nova.compute.manager [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Took 1.21 seconds to destroy the instance on the hypervisor. [ 998.697245] env[62914]: DEBUG oslo.service.loopingcall [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.697501] env[62914]: DEBUG nova.compute.manager [-] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 998.697614] env[62914]: DEBUG nova.network.neutron [-] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 998.705681] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 998.705681] env[62914]: value = "task-4832356" [ 998.705681] env[62914]: _type = "Task" [ 998.705681] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.718374] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832356, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.858106] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.878641] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.152512} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.878641] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.878641] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26df7f72-82e8-4219-9b1c-db6752d04e9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.906058] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 06e8b438-01ef-481f-8e27-2faa01bb97aa/06e8b438-01ef-481f-8e27-2faa01bb97aa.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.906438] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-824895a9-6744-4410-aa05-18b67230d5d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.931372] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 998.931372] env[62914]: value = "task-4832357" [ 998.931372] env[62914]: _type = "Task" [ 998.931372] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.938669] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.000211] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.039788] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.071116] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c662fc-cfac-0c7d-d73f-5694c6ef6ee6, 'name': SearchDatastore_Task, 'duration_secs': 0.019632} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.072198] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.072292] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.072572] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.072713] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.072896] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.073694] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.074052] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec 
tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 999.074323] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7c8994a-fc52-4753-a962-72ec95b6b320 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.078516] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-244b4df7-ebae-4d6c-8c06-9153a5fb833f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.085379] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 999.085379] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5290f998-a0c1-d578-cca3-5be222eca4ec" [ 999.085379] env[62914]: _type = "Task" [ 999.085379] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.091706] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.091706] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 999.095125] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b0d54e5-9c8a-4acf-b184-0827eb5ae578 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.097708] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5290f998-a0c1-d578-cca3-5be222eca4ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.103188] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 999.103188] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522cb854-7f32-63f4-4692-401a1bb6b10f" [ 999.103188] env[62914]: _type = "Task" [ 999.103188] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.115815] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522cb854-7f32-63f4-4692-401a1bb6b10f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.121590] env[62914]: DEBUG nova.network.neutron [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updated VIF entry in instance network info cache for port c64b5774-e946-4217-a170-f93b64d5070b. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 999.122242] env[62914]: DEBUG nova.network.neutron [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updating instance_info_cache with network_info: [{"id": "c64b5774-e946-4217-a170-f93b64d5070b", "address": "fa:16:3e:c6:f5:e1", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64b5774-e9", "ovs_interfaceid": "c64b5774-e946-4217-a170-f93b64d5070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.147638] env[62914]: DEBUG nova.network.neutron [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 999.173107] env[62914]: DEBUG nova.scheduler.client.report [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.220961] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832356, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.229828] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.230085] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.440131] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832357, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.459042] env[62914]: DEBUG nova.network.neutron [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Updating instance_info_cache with network_info: [{"id": "1559da30-bfec-4f82-9d1e-605294200ff3", "address": "fa:16:3e:51:87:69", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1559da30-bf", "ovs_interfaceid": "1559da30-bfec-4f82-9d1e-605294200ff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.498698] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.596589] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5290f998-a0c1-d578-cca3-5be222eca4ec, 'name': SearchDatastore_Task, 'duration_secs': 0.021636} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.596930] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.597234] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.597412] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.613712] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522cb854-7f32-63f4-4692-401a1bb6b10f, 'name': SearchDatastore_Task, 'duration_secs': 0.025051} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.614543] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6f49fcd-1412-4c42-8810-895e6e8b9723 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.620252] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 999.620252] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52868a4a-6d38-72d5-23c5-10fcdc954cf4" [ 999.620252] env[62914]: _type = "Task" [ 999.620252] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.625201] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Releasing lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.625540] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Received event network-vif-deleted-b2fe080d-6273-4a2c-b4dc-2d9ec37d4161 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 999.625818] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Received event network-vif-plugged-1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 999.626063] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Acquiring lock "79c7728a-0452-44ec-91de-62e3f09f9183-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.626282] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Lock "79c7728a-0452-44ec-91de-62e3f09f9183-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.626453] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] Lock "79c7728a-0452-44ec-91de-62e3f09f9183-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.626627] env[62914]: DEBUG nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] No waiting events found dispatching network-vif-plugged-1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 999.626798] env[62914]: WARNING nova.compute.manager [req-73ca79d0-ba00-4e1b-8e15-1b6796ac6525 req-bfca0717-5942-4bec-ba00-20fab27b2b03 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Received unexpected event network-vif-plugged-1559da30-bfec-4f82-9d1e-605294200ff3 for instance with vm_state building and task_state spawning. [ 999.630234] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52868a4a-6d38-72d5-23c5-10fcdc954cf4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.678389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.192s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.681736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 35.243s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.685997] env[62914]: DEBUG nova.network.neutron [-] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.722548] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832356, 'name': CreateSnapshot_Task, 'duration_secs': 0.577583} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.722843] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 999.723668] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2125633-303a-4221-a127-c6bc3e24f73d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.736331] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 999.740841] env[62914]: INFO nova.scheduler.client.report [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance af541b15-19ce-415a-b03e-cb605b780247 [ 999.940848] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832357, 'name': ReconfigVM_Task, 'duration_secs': 0.520367} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.941193] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 06e8b438-01ef-481f-8e27-2faa01bb97aa/06e8b438-01ef-481f-8e27-2faa01bb97aa.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.941880] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18209d3d-22ca-41d6-8109-c7c76d3b223e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.949860] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 999.949860] env[62914]: value = "task-4832358" [ 999.949860] env[62914]: _type = "Task" [ 999.949860] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.959325] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832358, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.962908] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "refresh_cache-79c7728a-0452-44ec-91de-62e3f09f9183" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.962908] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Instance network_info: |[{"id": "1559da30-bfec-4f82-9d1e-605294200ff3", "address": "fa:16:3e:51:87:69", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1559da30-bf", "ovs_interfaceid": "1559da30-bfec-4f82-9d1e-605294200ff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 999.962908] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:87:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1559da30-bfec-4f82-9d1e-605294200ff3', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.970762] env[62914]: DEBUG oslo.service.loopingcall [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.971055] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 999.971404] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88f82cfc-5b9c-4768-a1e1-726378b35f84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.994227] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.994227] env[62914]: value = "task-4832359" [ 999.994227] env[62914]: _type = "Task" [ 999.994227] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.001585] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.007417] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832359, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.136311] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52868a4a-6d38-72d5-23c5-10fcdc954cf4, 'name': SearchDatastore_Task, 'duration_secs': 0.017161} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.136660] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.136951] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1000.137274] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.137472] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.137708] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f784157a-8644-4923-ad5e-448cd7bdc486 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.141082] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcc48d29-caad-4c19-9aa4-11eeb9ea9b06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.151058] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1000.151058] env[62914]: value = "task-4832360" [ 1000.151058] env[62914]: _type = "Task" [ 1000.151058] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.156795] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.158511] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Folder [datastore1] devstack-image-cache_base created. 
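Several requests above serialize on the same cached image path ("[datastore1] devstack-image-cache_base/75c43660-.../75c43660-....vmdk"): one request releases the lock and starts its CopyVirtualDisk_Task while the next acquires it and re-checks the cache. A simplified threading-based analogue of that per-resource locking, purely for illustration (the real code uses oslo_concurrency.lockutils, as the lockutils.py paths in the log show):

```python
# Simplified stand-in for the per-image locking visible above: concurrent
# spawns serialize on the cached VMDK's datastore path, so only one request
# populates/verifies the cache entry while the others wait and then reuse it.

import threading
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    """Serialize callers on an arbitrary resource name (e.g. a datastore path)."""
    with _registry_guard:
        lock = _locks[name]
    lock.acquire()
    try:
        yield
    finally:
        lock.release()

CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
              "75c43660-b52b-450e-ba36-0f721e14bc6c/"
              "75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk")

def spawn_from_cache(instance_uuid, ensure_cached, copy_disk):
    # Hold the image-cache lock only while confirming/populating the cache
    # entry; the copy into the instance directory happens after release,
    # matching the release/acquire interleaving seen in the log.
    with named_lock(CACHE_VMDK):
        ensure_cached(CACHE_VMDK)
    copy_disk(CACHE_VMDK, f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk")
```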
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1000.158511] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d26fdbf6-2fc4-45a6-821d-8ee90aa20fd0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.165630] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.171443] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1000.171443] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e53b3c-7b30-adf7-57b5-2f65b72277e2" [ 1000.171443] env[62914]: _type = "Task" [ 1000.171443] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.185032] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e53b3c-7b30-adf7-57b5-2f65b72277e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.194364] env[62914]: INFO nova.compute.manager [-] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Took 1.50 seconds to deallocate network for instance. [ 1000.227607] env[62914]: DEBUG nova.compute.manager [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Received event network-changed-1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1000.227929] env[62914]: DEBUG nova.compute.manager [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Refreshing instance network info cache due to event network-changed-1559da30-bfec-4f82-9d1e-605294200ff3. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1000.228296] env[62914]: DEBUG oslo_concurrency.lockutils [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] Acquiring lock "refresh_cache-79c7728a-0452-44ec-91de-62e3f09f9183" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.228403] env[62914]: DEBUG oslo_concurrency.lockutils [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] Acquired lock "refresh_cache-79c7728a-0452-44ec-91de-62e3f09f9183" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.228600] env[62914]: DEBUG nova.network.neutron [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Refreshing network info cache for port 1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1000.252455] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1000.255602] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6ce60f38-4846-42c6-a5be-d0c844af37f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.259229] env[62914]: DEBUG oslo_concurrency.lockutils [None req-274b67dc-b680-4ade-8a7e-12c27f45fdb8 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "af541b15-19ce-415a-b03e-cb605b780247" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.547s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.267774] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1000.267774] env[62914]: value = "task-4832361" [ 1000.267774] env[62914]: _type = "Task" [ 1000.267774] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.274518] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.281645] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832361, 'name': CloneVM_Task} progress is 0%. 
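The req-d31ad6a8 entries above show the external-event path: Neutron reports network-changed-1559da30-..., and the compute manager refreshes that port's entry in the instance network info cache under a per-instance "refresh_cache-<uuid>" lock. A hedged sketch of that handling, with injected placeholders (cache, fetch_port_info, named_lock) rather than the real manager/neutron API:

```python
# Sketch of the 'network-changed-<port_id>' event handling logged above.
# All helper names are assumptions; only the locking-then-refresh shape is
# taken from the log.

def handle_external_event(event_name, instance_uuid, cache, fetch_port_info,
                          named_lock):
    if not event_name.startswith("network-changed-"):
        return
    port_id = event_name[len("network-changed-"):]
    # Serialize with other network-info refreshes for the same instance.
    with named_lock(f"refresh_cache-{instance_uuid}"):
        nw_info = cache.get(instance_uuid, [])
        fresh = fetch_port_info(port_id)  # re-query the port's current state
        # Replace only the matching VIF entry, preserving the others.
        cache[instance_uuid] = [
            fresh if vif["id"] == port_id else vif for vif in nw_info
        ]
```

The "Updated VIF entry in instance network info cache for port ..." entry further down is the tail end of exactly this refresh.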
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.460729] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832358, 'name': Rename_Task, 'duration_secs': 0.279713} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.461142] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1000.461455] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21c1247e-6c4c-4d84-ad64-25f8cacb1612 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.470879] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1000.470879] env[62914]: value = "task-4832362" [ 1000.470879] env[62914]: _type = "Task" [ 1000.470879] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.480446] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.500050] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.509308] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832359, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.665066] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832360, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.687044] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e53b3c-7b30-adf7-57b5-2f65b72277e2, 'name': SearchDatastore_Task, 'duration_secs': 0.016189} completed successfully. 
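The Rename_Task and PowerOnVM_Task entries above all follow the same wait_for_task rhythm: invoke the vSphere method, then poll the returned task object, logging "progress is N%" until it reports success (with a duration) or error. A stand-alone sketch of that polling loop; get_task_info is a hypothetical callable standing in for the real oslo.vmware property-collector lookup:

```python
# Generic task-polling loop in the spirit of the wait_for_task entries above.
# Not oslo.vmware's implementation; a simplified analogue for illustration.

import time

def wait_for_task(task_ref, get_task_info, poll_interval=0.5, log=print):
    """Poll a vSphere-style task until it finishes; return its result or raise."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)  # -> {'state': ..., 'progress': ..., ...}
        state = info["state"]
        if state == "running":
            log(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
        elif state == "success":
            log(f"Task {task_ref} completed successfully "
                f"(duration_secs={time.monotonic() - start:.6f}).")
            return info.get("result")
        elif state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
        time.sleep(poll_interval)
```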
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.687891] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0736e041-0d9a-454d-904b-2e7a581bfa75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.700754] env[62914]: INFO nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating resource usage from migration 8803623a-ecb1-4ba5-be89-a9d239df5334 [ 1000.701209] env[62914]: INFO nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating resource usage from migration b9c9eba1-369b-4782-a7b7-d155b21c313d [ 1000.704676] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1000.704676] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526cb352-02d1-ae51-7a7b-83ec5cafc425" [ 1000.704676] env[62914]: _type = "Task" [ 1000.704676] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.706427] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.720751] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526cb352-02d1-ae51-7a7b-83ec5cafc425, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.739685] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e061304c-998b-4331-b60d-809916844a6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.739893] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 10102941-c31a-4ab1-be5a-801520d49fd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.740049] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4911baea-15df-46db-be11-fcf998eb0cb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.740263] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 12e8b0ac-0dec-4928-ae65-ab53992ecab5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.740509] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 455965de-816d-4ab2-9d5e-a12b06893e6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.740727] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.740904] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance af141439-1c36-4184-9775-d1e30ee77ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.741132] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 55192659-4d65-4e74-a47f-46d650b6b095 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.741472] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bdec185e-2af7-4379-8c67-03e125750bb4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.741645] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance aedc785f-619f-4b9f-850f-790f84e57577 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.741836] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance dc99b470-4334-408d-8853-d2e9b9204d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.741964] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance d9476d24-fbc5-4e30-bf67-85c388e943fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.742143] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance dac99ed2-aed9-4c3e-bcab-a8de9967990c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.742407] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance b443050b-78ae-4f9d-81d4-508f5cf4a322 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.742518] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance fa33e1a5-677a-489c-8c89-a33066b18103 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.742662] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.742822] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.742983] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 06e8b438-01ef-481f-8e27-2faa01bb97aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.743167] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 60169fa7-3266-4105-b17b-f3677ed2c443 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.743342] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 79c7728a-0452-44ec-91de-62e3f09f9183 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.743485] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Migration b9c9eba1-369b-4782-a7b7-d155b21c313d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 1000.743605] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance ec73b924-e132-44b6-bc67-2b3c08592f03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.743719] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Migration 8803623a-ecb1-4ba5-be89-a9d239df5334 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 1000.743832] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 120fa16e-60cd-4326-b6c4-f1df419dbcb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1000.785103] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832361, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.984274] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.001929] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.012038] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832359, 'name': CreateVM_Task, 'duration_secs': 0.806518} completed successfully. 
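The long req-f00ee44e block above is the resource tracker's periodic allocation audit: each placement allocation against this node is classified as an actively managed instance (DEBUG), an in-progress migration (DEBUG), an instance scheduled here but not yet started (skip heal), or an allocation the host cannot account for (WARNING, skip heal). A condensed sketch of that classification; the function and argument names are illustrative, not the actual ResourceTracker API:

```python
# Condensed sketch of the allocation audit logged above; messages mirror the
# DEBUG/WARNING lines in the log, the structure around them is an assumption.

import logging

LOG = logging.getLogger("resource_tracker_sketch")

def audit_allocations(allocations, managed_instances, active_migrations,
                      scheduled_instances):
    for consumer_id, resources in allocations.items():
        if consumer_id in managed_instances:
            LOG.debug("Instance %s actively managed on this compute host and "
                      "has allocations in placement: %s.", consumer_id, resources)
        elif consumer_id in active_migrations:
            LOG.debug("Migration %s is active on this compute host and has "
                      "allocations in placement: %s.", consumer_id, resources)
        elif consumer_id in scheduled_instances:
            LOG.debug("Instance %s has been scheduled to this compute host but "
                      "has yet to start; skipping heal of allocation: %s.",
                      consumer_id, resources)
        else:
            LOG.warning("Instance %s is not being actively managed by this "
                        "compute host but has allocations referencing it: %s. "
                        "Skipping heal of allocation because we do not know "
                        "what to do.", consumer_id, resources)
```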
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.014684] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1001.015563] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.015744] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.016098] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1001.016732] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f25b3ad-3059-4239-bc78-c061afbb3a6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.023516] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1001.023516] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5216fe10-e2b7-558e-2c44-ab159645f1fd" [ 1001.023516] env[62914]: _type = "Task" [ 1001.023516] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.035866] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5216fe10-e2b7-558e-2c44-ab159645f1fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.162283] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787817} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.162614] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1001.162839] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.163124] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c5ddad9-1175-4635-943f-150233fd87ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.170017] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1001.170017] env[62914]: value = "task-4832363" [ 1001.170017] env[62914]: _type = "Task" [ 1001.170017] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.178773] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.218090] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526cb352-02d1-ae51-7a7b-83ec5cafc425, 'name': SearchDatastore_Task, 'duration_secs': 0.068865} completed successfully. 
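The dcf5a6d6-... spawn above runs the sparse-image path end to end: copy the cached VMDK into the instance folder, grow it to the flavor's root size (1048576 here is in KB, i.e. a 1 GB root disk), then reconfigure the VM to attach it. A compact sketch of that sequence with injected task helpers; the callables are placeholders, not the real vm_util/volumeops functions:

```python
# Sketch of the copy -> extend -> attach sequence visible above.

def spawn_root_disk(instance_uuid, image_id, root_gb,
                    copy_virtual_disk, extend_virtual_disk, attach_disk_to_vm):
    cache_path = (f"[datastore1] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
    dest_path = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"

    copy_virtual_disk(cache_path, dest_path)      # CopyVirtualDisk_Task
    requested_kb = root_gb * 1024 * 1024          # flavor root size, in KB
    extend_virtual_disk(dest_path, requested_kb)  # ExtendVirtualDisk_Task
    attach_disk_to_vm(instance_uuid, dest_path,
                      disk_type="sparse")         # ReconfigVM_Task
    return dest_path
```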
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.218910] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.218910] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 60169fa7-3266-4105-b17b-f3677ed2c443/60169fa7-3266-4105-b17b-f3677ed2c443.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1001.219202] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b474e60-3846-4e76-977a-d8217169aaef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.226543] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1001.226543] env[62914]: value = "task-4832364" [ 1001.226543] env[62914]: _type = "Task" [ 1001.226543] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.240243] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.247350] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1001.256559] env[62914]: DEBUG nova.network.neutron [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Updated VIF entry in instance network info cache for port 1559da30-bfec-4f82-9d1e-605294200ff3. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1001.257145] env[62914]: DEBUG nova.network.neutron [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Updating instance_info_cache with network_info: [{"id": "1559da30-bfec-4f82-9d1e-605294200ff3", "address": "fa:16:3e:51:87:69", "network": {"id": "eceea83b-9c7a-49cb-b77f-f233a6b3231c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1825415238-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d271710592bf47b79e16552221fe7107", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1559da30-bf", "ovs_interfaceid": "1559da30-bfec-4f82-9d1e-605294200ff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.283273] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832361, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.486993] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.503441] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.535977] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5216fe10-e2b7-558e-2c44-ab159645f1fd, 'name': SearchDatastore_Task, 'duration_secs': 0.017506} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.536472] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.536833] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.537487] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.537487] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.537487] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.537806] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbaf5629-724f-4e67-a278-fd9fc1b75fc5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.555935] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.556248] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1001.557420] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebeeee4e-fb5d-4629-b129-38bbedf9be35 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.565411] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1001.565411] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5234cb53-5bc4-aeff-9320-5ca550e917bd" [ 1001.565411] env[62914]: _type = "Task" [ 1001.565411] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.575086] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5234cb53-5bc4-aeff-9320-5ca550e917bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.680626] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113059} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.681040] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.682010] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4036664a-a8d9-4c9a-a1fb-26a1b53ec4b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.706632] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.706999] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59562925-67d0-40b6-a2ba-3f71c5839469 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.733243] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1001.733243] env[62914]: value = "task-4832365" [ 1001.733243] env[62914]: _type = "Task" [ 1001.733243] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.741487] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832364, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.748642] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832365, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.750706] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1001.751071] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1001.751385] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3968MB phys_disk=100GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '18', 'num_vm_active': '15', 'num_task_None': '10', 'num_os_type_None': '18', 'num_proj_b19293a423174c20963c000441db100e': '1', 'io_workload': '5', 'num_proj_adf406f1352240aba2338e64b8f182b4': '1', 'num_proj_5adc4dc554ed4fe69f214161fd8ab9b9': '4', 'num_proj_3ffdaa966ecb4979845fda7778c7fb45': '1', 'num_proj_d141c01c1d5848eea6ef2b831e431ba5': '1', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '1', 'num_proj_2562164f04b045a59b3b501d2b0014ec': '2', 'num_task_resize_prep': '2', 'num_proj_894c73ea90624428afeb1165afbbfa9c': '1', 'num_task_deleting': '1', 'num_task_image_pending_upload': '1', 'num_proj_78ce97bf0a6a4b65b3cd1e316989a1ed': '1', 'num_proj_14ea39ac6e2d400ca89bbffc20d764ef': '1', 'num_task_rebuild_spawning': '1', 'num_proj_450b199fddd049348541e7a6d92f6a67': '1', 'num_vm_building': '3', 'num_task_spawning': '3', 'num_proj_b59bf6daf8c246f7b034dc0adcfc8cde': '1', 'num_proj_4860bec4a28e4289b7a508f007fff452': '1', 'num_proj_d271710592bf47b79e16552221fe7107': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1001.759852] env[62914]: DEBUG oslo_concurrency.lockutils [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] Releasing lock "refresh_cache-79c7728a-0452-44ec-91de-62e3f09f9183" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.760194] env[62914]: DEBUG nova.compute.manager [req-d31ad6a8-ae25-47fa-b7db-3e7b24c83255 req-f58c79af-2e6e-4368-b631-1ec5e1938b70 service nova] [instance: 
d9476d24-fbc5-4e30-bf67-85c388e943fd] Received event network-vif-deleted-8963aef9-1731-4bd4-b659-83eb9724f8f9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1001.780786] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832361, 'name': CloneVM_Task, 'duration_secs': 1.448372} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.784469] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Created linked-clone VM from snapshot [ 1001.785743] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813042c9-07a1-49bf-a090-28929e1228ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.800024] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Uploading image b235be8d-55b7-4ed0-ba00-323d975a0a11 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1001.820874] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1001.821223] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0a710527-731a-4ea5-986e-9c9b2ffb3b46 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.834141] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1001.834141] env[62914]: value = "task-4832366" [ 1001.834141] env[62914]: _type = "Task" [ 1001.834141] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.852593] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832366, 'name': Destroy_Task} progress is 0%. 
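The b1a9b6c6-... entries around here trace the snapshot-upload flow: a linked-clone VM is created from the snapshot (CloneVM_Task), the clone is uploaded as a stream-optimized image (b235be8d-...), the temporary clone is destroyed (Destroy_Task), and the snapshot itself is removed a little later (RemoveSnapshot_Task). A hedged sketch of that ordering with injected step functions; the cleanup-in-finally shape is an assumption, only the sequence of steps is taken from the log:

```python
# Sketch of the clone -> upload -> destroy clone -> remove snapshot ordering.

def snapshot_to_image(vm_ref, image_id, create_snapshot, create_linked_clone,
                      upload_stream_optimized, destroy_vm, remove_snapshot):
    snapshot_ref = create_snapshot(vm_ref)                 # snapshot the VM
    clone_ref = create_linked_clone(vm_ref, snapshot_ref)  # CloneVM_Task
    try:
        upload_stream_optimized(clone_ref, image_id)       # push image data
    finally:
        # Leave nothing temporary behind on the vCenter.
        destroy_vm(clone_ref)                              # Destroy_Task
        remove_snapshot(vm_ref, snapshot_ref)              # RemoveSnapshot_Task
```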
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.968636] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.968636] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.986386] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.005620] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.076341] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5234cb53-5bc4-aeff-9320-5ca550e917bd, 'name': SearchDatastore_Task, 'duration_secs': 0.048826} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.080033] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2c7b8f4-1fc0-4a55-88ca-4a41088798a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.086701] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1002.086701] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ad9c17-25e4-0e3c-3b95-a74f9e86d58a" [ 1002.086701] env[62914]: _type = "Task" [ 1002.086701] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.097339] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ad9c17-25e4-0e3c-3b95-a74f9e86d58a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.225252] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c619a5-e8ed-449d-8e54-3cbbed84e3d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.237409] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650677} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.242346] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 60169fa7-3266-4105-b17b-f3677ed2c443/60169fa7-3266-4105-b17b-f3677ed2c443.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1002.242592] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1002.242900] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de815e04-8d40-415d-86ae-27299ef88468 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.245701] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd026509-f351-468a-9987-6379f08687ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.255373] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832365, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.281919] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1002.281919] env[62914]: value = "task-4832367" [ 1002.281919] env[62914]: _type = "Task" [ 1002.281919] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.282800] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83452d24-852f-4a11-9ab7-5f4c859d0ec5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.296026] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832367, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.297419] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c40a981-290f-4c73-a898-57e5bbc2cbae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.313040] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.346086] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832366, 'name': Destroy_Task, 'duration_secs': 0.343318} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.346421] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Destroyed the VM [ 1002.346676] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1002.346954] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e07349ed-7e49-4483-a26b-f2edb17fa83e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.355031] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1002.355031] env[62914]: value = "task-4832368" [ 1002.355031] env[62914]: _type = "Task" [ 1002.355031] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.367150] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832368, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.470914] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1002.484159] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.503355] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.599263] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ad9c17-25e4-0e3c-3b95-a74f9e86d58a, 'name': SearchDatastore_Task, 'duration_secs': 0.026001} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.599573] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.599854] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 79c7728a-0452-44ec-91de-62e3f09f9183/79c7728a-0452-44ec-91de-62e3f09f9183.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1002.600133] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb4bb6b6-1bf1-4576-b7f3-cf6753b4c8a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.607048] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1002.607048] env[62914]: value = "task-4832369" [ 1002.607048] env[62914]: _type = "Task" [ 1002.607048] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.615977] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832369, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.748593] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832365, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.798765] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832367, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.815912] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.867765] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832368, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.990148] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.000670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.004809] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.118116] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832369, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.248397] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832365, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.296803] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832367, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.322796] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1003.323060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.642s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.323376] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.042s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.323668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.325758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.348s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.325960] env[62914]: DEBUG nova.objects.instance [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1003.349850] env[62914]: INFO nova.scheduler.client.report [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Deleted allocations for instance bdec185e-2af7-4379-8c67-03e125750bb4 [ 1003.369991] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf 
tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832368, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.490324] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.505733] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.618673] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.753130] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832365, 'name': ReconfigVM_Task, 'duration_secs': 1.981238} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.753660] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Reconfigured VM instance instance-0000005b to attach disk [datastore1] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1/dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.754552] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b48eb343-f94f-4167-aaed-fc83ee4b0409 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.763411] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1003.763411] env[62914]: value = "task-4832370" [ 1003.763411] env[62914]: _type = "Task" [ 1003.763411] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.772908] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832370, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.797549] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832367, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.376837} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.797898] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.798959] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d88a7cc-cb8c-448d-8702-dec9be28af73 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.833425] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 60169fa7-3266-4105-b17b-f3677ed2c443/60169fa7-3266-4105-b17b-f3677ed2c443.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.837824] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad86c2a9-5307-48f8-9481-4ea9e0940a5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.865181] env[62914]: DEBUG oslo_concurrency.lockutils [None req-54fd57da-29b4-4d5c-bebe-32f604c9cf7c tempest-ServerShowV247Test-1836252768 tempest-ServerShowV247Test-1836252768-project-member] Lock "bdec185e-2af7-4379-8c67-03e125750bb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.327s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.877027] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1003.877027] env[62914]: value = "task-4832371" [ 1003.877027] env[62914]: _type = "Task" [ 1003.877027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.884543] env[62914]: DEBUG oslo_vmware.api [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832368, 'name': RemoveSnapshot_Task, 'duration_secs': 1.099312} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.885537] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1003.894878] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832371, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.990906] env[62914]: DEBUG oslo_vmware.api [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832362, 'name': PowerOnVM_Task, 'duration_secs': 3.458991} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.991431] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1003.992210] env[62914]: INFO nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Took 11.90 seconds to spawn the instance on the hypervisor. [ 1003.992210] env[62914]: DEBUG nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1003.992769] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6030a7c6-a557-4c30-bbcd-06e4ef93a4f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.015194] env[62914]: DEBUG oslo_vmware.api [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832346, 'name': ReconfigVM_Task, 'duration_secs': 7.403878} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.015194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.015194] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Reconfigured VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1004.123454] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832369, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.277420] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832370, 'name': Rename_Task, 'duration_secs': 0.235146} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.277792] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1004.280747] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-360dbbb4-66a5-4405-963e-695b9ca5a22d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.288602] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1004.288602] env[62914]: value = "task-4832372" [ 1004.288602] env[62914]: _type = "Task" [ 1004.288602] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.298581] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.339462] env[62914]: DEBUG oslo_concurrency.lockutils [None req-438e4dfb-8321-4908-bdda-8529b12805b1 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.340786] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.757s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.384346] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832371, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.397614] env[62914]: WARNING nova.compute.manager [None req-16bb505f-c5d9-467e-b5de-05ba889b2dcf tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Image not found during snapshot: nova.exception.ImageNotFound: Image b235be8d-55b7-4ed0-ba00-323d975a0a11 could not be found. [ 1004.523966] env[62914]: INFO nova.compute.manager [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Took 47.06 seconds to build instance. [ 1004.619159] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832369, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.666869} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.619598] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 79c7728a-0452-44ec-91de-62e3f09f9183/79c7728a-0452-44ec-91de-62e3f09f9183.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1004.619840] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.620140] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7cd52479-6cf1-4a22-94d5-3bde9c55fd9f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.629281] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1004.629281] env[62914]: value = "task-4832373" [ 1004.629281] env[62914]: _type = "Task" [ 1004.629281] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.639240] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832373, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.800606] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.845513] env[62914]: INFO nova.compute.claims [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.886244] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832371, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.026294] env[62914]: DEBUG oslo_concurrency.lockutils [None req-079021ab-1ee3-42e5-807d-ce91e16a4550 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.577s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.143104] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099787} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.144026] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.144970] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e82ac00-e2d7-4a7d-9027-c737177fb348 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.174195] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 79c7728a-0452-44ec-91de-62e3f09f9183/79c7728a-0452-44ec-91de-62e3f09f9183.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.174546] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4b71c3c-54e1-4517-9cba-5b8168d8385e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.197370] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1005.197370] env[62914]: value = "task-4832374" [ 1005.197370] env[62914]: _type = "Task" [ 1005.197370] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.209975] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832374, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.302101] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832372, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.352381] env[62914]: INFO nova.compute.resource_tracker [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating resource usage from migration b9c9eba1-369b-4782-a7b7-d155b21c313d [ 1005.387230] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832371, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.572556] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.574029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.574029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.574029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.574029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.576445] env[62914]: INFO nova.compute.manager [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Terminating instance [ 1005.578511] env[62914]: DEBUG nova.compute.manager [None 
req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1005.578702] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1005.579583] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f9e8e2-d5e2-4f54-81cd-2e61f7471d51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.592246] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1005.592246] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-619efa0d-4636-42a6-9ccf-4dd58cbb707e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.603027] env[62914]: DEBUG oslo_vmware.api [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1005.603027] env[62914]: value = "task-4832375" [ 1005.603027] env[62914]: _type = "Task" [ 1005.603027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.613144] env[62914]: DEBUG oslo_vmware.api [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832375, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.712468] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832374, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.772253] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.772405] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.772611] env[62914]: DEBUG nova.network.neutron [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1005.802443] env[62914]: DEBUG oslo_vmware.api [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832372, 'name': PowerOnVM_Task, 'duration_secs': 1.2214} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.802751] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1005.802979] env[62914]: DEBUG nova.compute.manager [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1005.804246] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a39654-5d50-4e7c-b318-409c5f3655c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.888769] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832371, 'name': ReconfigVM_Task, 'duration_secs': 1.817111} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.892501] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 60169fa7-3266-4105-b17b-f3677ed2c443/60169fa7-3266-4105-b17b-f3677ed2c443.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.893560] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c84f5f05-8084-41c1-91e7-41d682a0d106 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.900964] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1005.900964] env[62914]: value = "task-4832376" [ 1005.900964] env[62914]: _type = "Task" [ 1005.900964] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.912512] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832376, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.916124] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef514da-4c3e-461f-bd3c-ed30dcb2ac2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.926350] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6fd2d8-e2ed-4756-b402-93a63db3a96a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.961057] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90903082-8c73-4c79-9d9f-7d6f033905b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.969650] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe99426-88b9-4909-b2cb-1542a6af3bbf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.987130] env[62914]: DEBUG nova.compute.provider_tree [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.113955] env[62914]: DEBUG oslo_vmware.api [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832375, 'name': PowerOffVM_Task, 'duration_secs': 0.228571} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.114384] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1006.114451] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1006.117760] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a7c4cbb-f9b7-44bb-ba2b-185656b1a52a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.185921] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1006.185921] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1006.185921] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleting the datastore file [datastore2] b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1006.185921] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0fad47b-c8fd-4a07-8952-692295f244fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.191669] env[62914]: DEBUG oslo_vmware.api [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1006.191669] env[62914]: value = "task-4832378" [ 1006.191669] env[62914]: _type = "Task" [ 1006.191669] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.201080] env[62914]: DEBUG oslo_vmware.api [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.209817] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832374, 'name': ReconfigVM_Task, 'duration_secs': 0.622908} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.210309] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 79c7728a-0452-44ec-91de-62e3f09f9183/79c7728a-0452-44ec-91de-62e3f09f9183.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.211050] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99da1695-614a-40c3-a5e8-f9b8c0188b6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.219246] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1006.219246] env[62914]: value = "task-4832379" [ 1006.219246] env[62914]: _type = "Task" [ 1006.219246] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.230038] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832379, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.324620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.412157] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832376, 'name': Rename_Task, 'duration_secs': 0.369467} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.412491] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1006.412765] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44310a2d-a609-48c6-807b-5e044e929522 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.423963] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1006.423963] env[62914]: value = "task-4832380" [ 1006.423963] env[62914]: _type = "Task" [ 1006.423963] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.435082] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.488964] env[62914]: DEBUG nova.scheduler.client.report [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1006.657565] env[62914]: DEBUG nova.compute.manager [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Received event network-changed-91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1006.657780] env[62914]: DEBUG nova.compute.manager [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Refreshing instance network info cache due to event network-changed-91711c66-4bec-40d9-b1be-9603bbad7e46. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1006.658337] env[62914]: DEBUG oslo_concurrency.lockutils [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] Acquiring lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.658663] env[62914]: DEBUG oslo_concurrency.lockutils [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] Acquired lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.658756] env[62914]: DEBUG nova.network.neutron [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Refreshing network info cache for port 91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1006.708601] env[62914]: DEBUG oslo_vmware.api [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170285} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.708988] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.709084] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1006.709247] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1006.709529] env[62914]: INFO nova.compute.manager [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1006.709724] env[62914]: DEBUG oslo.service.loopingcall [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1006.710299] env[62914]: DEBUG nova.compute.manager [-] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1006.710299] env[62914]: DEBUG nova.network.neutron [-] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1006.739524] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832379, 'name': Rename_Task, 'duration_secs': 0.27302} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.739963] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1006.740169] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c895cc3-ce2a-42b6-aed1-ed412a4484a1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.750153] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1006.750153] env[62914]: value = "task-4832381" [ 1006.750153] env[62914]: _type = "Task" [ 1006.750153] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.761268] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.816978] env[62914]: INFO nova.network.neutron [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Port b3e109fe-6c2f-407e-97fd-39b74b3bc4bc from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
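Editor's note: the entries above and below repeat one pattern over and over: a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, RemoveSnapshot_Task, DeleteDatastoreFile_Task) is created, polled for progress ("progress is N%"), and finally reported as "completed successfully" with a duration. In this log that loop lives in oslo_vmware/api.py (the wait_for_task / _poll_task frames cited in each entry). The sketch below is only an illustration of that poll-until-done shape, not Nova's or oslo.vmware's actual code; `fetch_task_info` and `TaskInfo` are hypothetical stand-ins for the PropertyCollector.RetrievePropertiesEx round-trips visible in the log.

```python
# Illustrative sketch only -- not the oslo.vmware implementation.
# `fetch_task_info` is a hypothetical callable standing in for the
# PropertyCollector.RetrievePropertiesEx calls seen in the log; it must
# return an object exposing `state`, `progress`, and `error`.
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    state: str                 # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0          # percentage, as in "progress is 66%"
    error: Optional[str] = None


def wait_for_task(task_id: str,
                  fetch_task_info: Callable[[str], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a task until it reaches a terminal state, logging progress."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state == 'success':
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs: {time.monotonic() - start:.6f})")
            return info
        if info.state == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)
```

The "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... } to complete." blocks above correspond to entering such a loop, and the "Task: {'id': ..., 'duration_secs': ...} completed successfully." entries correspond to it returning.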
[ 1006.816978] env[62914]: DEBUG nova.network.neutron [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.936974] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832380, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.997463] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.654s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.997463] env[62914]: INFO nova.compute.manager [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Migrating [ 1007.003034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.586s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.003614] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.005972] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.579s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.006370] env[62914]: DEBUG nova.objects.instance [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1007.022372] env[62914]: DEBUG nova.compute.manager [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1007.022641] env[62914]: DEBUG nova.compute.manager [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing instance network info cache due to event network-changed-24c487f8-b730-47b7-8817-5b3894271b0f. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1007.022840] env[62914]: DEBUG oslo_concurrency.lockutils [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] Acquiring lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.050605] env[62914]: INFO nova.scheduler.client.report [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Deleted allocations for instance 12e8b0ac-0dec-4928-ae65-ab53992ecab5 [ 1007.123113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.123514] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.123820] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.124116] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.124375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.128103] env[62914]: INFO nova.compute.manager [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Terminating instance [ 1007.130652] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock 
"refresh_cache-dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.130896] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquired lock "refresh_cache-dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.131169] env[62914]: DEBUG nova.network.neutron [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.263337] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832381, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.320304] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.326022] env[62914]: DEBUG oslo_concurrency.lockutils [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] Acquired lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.326022] env[62914]: DEBUG nova.network.neutron [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Refreshing network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1007.438793] env[62914]: DEBUG oslo_vmware.api [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832380, 'name': PowerOnVM_Task, 'duration_secs': 0.748468} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.439167] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1007.439425] env[62914]: INFO nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Took 12.72 seconds to spawn the instance on the hypervisor. 
[ 1007.439731] env[62914]: DEBUG nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1007.440579] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9619b42-eb5a-49d2-9469-236f6e144c56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.450734] env[62914]: WARNING oslo_messaging._drivers.amqpdriver [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1007.527125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.527125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.527125] env[62914]: DEBUG nova.network.neutron [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.547062] env[62914]: DEBUG nova.network.neutron [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updated VIF entry in instance network info cache for port 91711c66-4bec-40d9-b1be-9603bbad7e46. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1007.547439] env[62914]: DEBUG nova.network.neutron [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updating instance_info_cache with network_info: [{"id": "91711c66-4bec-40d9-b1be-9603bbad7e46", "address": "fa:16:3e:97:22:3f", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91711c66-4b", "ovs_interfaceid": "91711c66-4bec-40d9-b1be-9603bbad7e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.558745] env[62914]: DEBUG oslo_concurrency.lockutils [None req-9784f911-c51b-4f84-9e79-5183adaa9275 tempest-ListImageFiltersTestJSON-1708948366 tempest-ListImageFiltersTestJSON-1708948366-project-member] Lock "12e8b0ac-0dec-4928-ae65-ab53992ecab5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.212s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.655849] env[62914]: DEBUG nova.network.neutron [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1007.727219] env[62914]: DEBUG nova.network.neutron [-] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.739765] env[62914]: DEBUG nova.network.neutron [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.764911] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832381, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.826483] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ea65007d-10e7-4fec-b4a3-d442c408de73 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-aedc785f-619f-4b9f-850f-790f84e57577-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 12.012s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.827839] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-fa33e1a5-677a-489c-8c89-a33066b18103-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.828168] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-fa33e1a5-677a-489c-8c89-a33066b18103-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.828606] env[62914]: DEBUG nova.objects.instance [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'flavor' on Instance uuid fa33e1a5-677a-489c-8c89-a33066b18103 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.963977] env[62914]: INFO nova.compute.manager [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Took 50.30 seconds to build instance. 
[ 1008.023908] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8aebac71-3f1a-4f04-bd73-d2871d3fae02 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.025436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.093s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.054484] env[62914]: DEBUG oslo_concurrency.lockutils [req-d44bff56-217e-4725-9c45-4f526198a8c3 req-aea93daa-038c-4beb-a20c-3098800bae30 service nova] Releasing lock "refresh_cache-06e8b438-01ef-481f-8e27-2faa01bb97aa" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.161932] env[62914]: DEBUG nova.network.neutron [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updated VIF entry in instance network info cache for port 24c487f8-b730-47b7-8817-5b3894271b0f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1008.162342] env[62914]: DEBUG nova.network.neutron [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [{"id": "24c487f8-b730-47b7-8817-5b3894271b0f", "address": "fa:16:3e:d2:78:70", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c487f8-b7", "ovs_interfaceid": "24c487f8-b730-47b7-8817-5b3894271b0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.233129] env[62914]: INFO nova.compute.manager [-] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Took 1.52 seconds to deallocate network for instance. 
[ 1008.243622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Releasing lock "refresh_cache-dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.244138] env[62914]: DEBUG nova.compute.manager [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1008.244400] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1008.245918] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52617e6-a6a7-4b2a-9662-6257a18e2f26 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.259859] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1008.259859] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b62977e-70d2-466e-9e72-50767ea9fc0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.265077] env[62914]: DEBUG oslo_vmware.api [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832381, 'name': PowerOnVM_Task, 'duration_secs': 1.12243} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.269229] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1008.269665] env[62914]: INFO nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Took 10.88 seconds to spawn the instance on the hypervisor. 
[ 1008.270147] env[62914]: DEBUG nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1008.271763] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529f257f-1a29-46b4-aa85-b484419cb1a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.276408] env[62914]: DEBUG oslo_vmware.api [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1008.276408] env[62914]: value = "task-4832382" [ 1008.276408] env[62914]: _type = "Task" [ 1008.276408] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.293589] env[62914]: DEBUG oslo_vmware.api [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.322299] env[62914]: DEBUG nova.network.neutron [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.441949] env[62914]: DEBUG nova.objects.instance [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'pci_requests' on Instance uuid fa33e1a5-677a-489c-8c89-a33066b18103 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.465864] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a8468f9-ac51-416c-82a3-88a7ab1094ec 
tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.828s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.531790] env[62914]: INFO nova.compute.claims [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.665478] env[62914]: DEBUG oslo_concurrency.lockutils [req-9cc99919-18ae-46a9-8efa-9e2bd3f60e17 req-665f620b-5a11-479b-9f69-46bb3df19124 service nova] Releasing lock "refresh_cache-aedc785f-619f-4b9f-850f-790f84e57577" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.740202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.794029] env[62914]: DEBUG oslo_vmware.api [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832382, 'name': PowerOffVM_Task, 'duration_secs': 0.342138} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.795817] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1008.796089] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1008.797452] env[62914]: INFO nova.compute.manager [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Took 50.69 seconds to build instance. 
[ 1008.797603] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c39fb657-accb-4d69-ae13-5a15b3c27ab8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.823710] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.827309] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1008.827468] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1008.827704] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Deleting the datastore file [datastore1] dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.827991] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02340410-74f4-41b8-b05c-a2908861f511 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.835851] env[62914]: DEBUG oslo_vmware.api [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for the task: (returnval){ [ 1008.835851] env[62914]: value = "task-4832384" [ 1008.835851] env[62914]: _type = "Task" [ 1008.835851] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.846429] env[62914]: DEBUG oslo_vmware.api [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832384, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.943682] env[62914]: DEBUG nova.objects.base [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1008.943992] env[62914]: DEBUG nova.network.neutron [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1009.027506] env[62914]: DEBUG nova.policy [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8a3ae8d2204c1d8c3a00c192657973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2562164f04b045a59b3b501d2b0014ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1009.038506] env[62914]: INFO nova.compute.resource_tracker [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating resource usage from migration 8803623a-ecb1-4ba5-be89-a9d239df5334 [ 1009.301357] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cac69c14-cd3f-4341-b093-b2217a88118c tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "79c7728a-0452-44ec-91de-62e3f09f9183" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.205s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.354088] env[62914]: DEBUG oslo_vmware.api [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Task: {'id': task-4832384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416339} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.354510] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.354629] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1009.354815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1009.354994] env[62914]: INFO nova.compute.manager [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1009.355273] env[62914]: DEBUG oslo.service.loopingcall [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.355940] env[62914]: DEBUG nova.compute.manager [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1009.355940] env[62914]: DEBUG nova.network.neutron [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1009.375973] env[62914]: DEBUG nova.network.neutron [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1009.407243] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8313deba-a311-4180-b30a-a8e24e2e19f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.415898] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda1b0e3-3dca-49eb-a465-09382c94475f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.450552] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e38ac5-b99f-4622-a748-b57aed0763e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.458931] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12bbb32-976a-4448-a62e-70c0e7996b41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.473862] env[62914]: DEBUG nova.compute.provider_tree [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.661228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "60169fa7-3266-4105-b17b-f3677ed2c443" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.661613] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.661816] env[62914]: INFO nova.compute.manager [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Shelving [ 1009.878806] env[62914]: DEBUG nova.network.neutron [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.977030] env[62914]: DEBUG nova.scheduler.client.report [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 
'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.118916] env[62914]: DEBUG nova.compute.manager [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1010.119110] env[62914]: DEBUG nova.compute.manager [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing instance network info cache due to event network-changed-de62c681-4ead-4636-8a49-3bcab66952b9. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1010.119267] env[62914]: DEBUG oslo_concurrency.lockutils [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.119454] env[62914]: DEBUG oslo_concurrency.lockutils [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.119630] env[62914]: DEBUG nova.network.neutron [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1010.169313] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1010.169606] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84bc9bdc-14e9-4664-abdc-b6499af9a97f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.179026] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1010.179026] env[62914]: value = "task-4832385" [ 1010.179026] env[62914]: _type = "Task" [ 1010.179026] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.189659] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832385, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.341083] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e076394f-db28-4039-adc4-c47cc1fbba20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.366195] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1010.381574] env[62914]: INFO nova.compute.manager [-] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Took 1.03 seconds to deallocate network for instance. [ 1010.482169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.457s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.482442] env[62914]: INFO nova.compute.manager [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Migrating [ 1010.491138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.989s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.491516] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.494123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.914s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.494381] env[62914]: DEBUG nova.objects.instance [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1010.524680] env[62914]: INFO nova.scheduler.client.report [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 
tempest-ServerDiagnosticsV248Test-611892576-project-member] Deleted allocations for instance b443050b-78ae-4f9d-81d4-508f5cf4a322 [ 1010.580300] env[62914]: DEBUG nova.network.neutron [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Successfully updated port: b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.690652] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832385, 'name': PowerOffVM_Task, 'duration_secs': 0.285244} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.690958] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1010.692315] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d860512d-cd89-476f-8f73-c6f0a25404dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.715145] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6baf2d-a782-4487-a12f-b8c6857c0b1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.770158] env[62914]: DEBUG nova.compute.manager [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1010.774974] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91d4280-cc73-45c1-97dd-b8399d446490 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.872850] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1010.873637] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5e77b14-7049-45a0-be1a-1cbaeee1514a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.883221] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1010.883221] env[62914]: value = "task-4832386" [ 1010.883221] env[62914]: _type = "Task" [ 1010.883221] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.887884] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.892571] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.915730] env[62914]: DEBUG nova.network.neutron [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updated VIF entry in instance network info cache for port de62c681-4ead-4636-8a49-3bcab66952b9. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.916172] env[62914]: DEBUG nova.network.neutron [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.008538] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.008732] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.008914] env[62914]: DEBUG nova.network.neutron [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1011.040058] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cc8d08d5-f725-475c-9c18-87fb1d7fc7df tempest-ServerDiagnosticsV248Test-611892576 tempest-ServerDiagnosticsV248Test-611892576-project-member] Lock "b443050b-78ae-4f9d-81d4-508f5cf4a322" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.824s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.083019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.231552] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1011.231932] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ee53e2de-d0ad-436d-8303-e20473e4d4c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.241459] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1011.241459] env[62914]: value = "task-4832387" [ 1011.241459] env[62914]: _type = "Task" [ 1011.241459] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.254024] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832387, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.290592] env[62914]: INFO nova.compute.manager [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] instance snapshotting [ 1011.294391] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06127c67-4133-4fd0-9ec5-39ab69a75f1f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.314958] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41253c9e-f5cb-491f-a98d-efb20e511228 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.393805] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832386, 'name': PowerOffVM_Task, 'duration_secs': 0.222537} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.394146] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1011.394345] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1011.419162] env[62914]: DEBUG oslo_concurrency.lockutils [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.419669] env[62914]: DEBUG nova.compute.manager [req-6c1f7d9d-29fa-44e7-b5f7-fbde7fb7ae0d req-bbccab24-19e4-4116-9afc-be5ae15c2c54 service nova] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Received event network-vif-deleted-771b1fec-a03e-465e-93bb-e565d996e361 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1011.419968] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.420182] env[62914]: DEBUG nova.network.neutron [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1011.516887] 
env[62914]: DEBUG oslo_concurrency.lockutils [None req-4dbd4c29-4ea6-4d28-aeac-cb3856d7db52 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.518031] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.660s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.519679] env[62914]: INFO nova.compute.claims [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.760495] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832387, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.827600] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1011.830752] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bedaea2d-0e60-47c0-8c72-63d3f39ed751 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.838816] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1011.838816] env[62914]: value = "task-4832388" [ 1011.838816] env[62914]: _type = "Task" [ 1011.838816] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.848940] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832388, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.874530] env[62914]: DEBUG nova.network.neutron [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [{"id": "2295762d-8e27-469d-a292-9ef453b210d6", "address": "fa:16:3e:65:01:15", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295762d-8e", "ovs_interfaceid": "2295762d-8e27-469d-a292-9ef453b210d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.902682] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1011.902895] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1011.902945] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.903146] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1011.903307] env[62914]: 
DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.903468] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1011.903684] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1011.903853] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1011.904039] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1011.904215] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1011.904401] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.910055] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cedad8ae-cc3f-4235-b94c-6f6d8bb64826 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.929112] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1011.929112] env[62914]: value = "task-4832389" [ 1011.929112] env[62914]: _type = "Task" [ 1011.929112] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.940454] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832389, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.000096] env[62914]: WARNING nova.network.neutron [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] 9be47f79-b984-4fc2-a590-a80f36132ab1 already exists in list: networks containing: ['9be47f79-b984-4fc2-a590-a80f36132ab1']. ignoring it [ 1012.256457] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832387, 'name': CreateSnapshot_Task, 'duration_secs': 0.854186} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.256917] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1012.257735] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ede7e77-ac0a-4c82-86fc-fc11026e9ddd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.348682] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832388, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.377822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.397606] env[62914]: DEBUG nova.network.neutron [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "address": "fa:16:3e:a6:45:00", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e109fe-6c", "ovs_interfaceid": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.445080] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832389, 'name': 
ReconfigVM_Task, 'duration_secs': 0.223541} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.445686] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1012.769331] env[62914]: DEBUG nova.compute.manager [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-vif-plugged-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1012.769331] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.769461] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] Lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.769676] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] Lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.769808] env[62914]: DEBUG nova.compute.manager [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] No waiting events found dispatching network-vif-plugged-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1012.769977] env[62914]: WARNING nova.compute.manager [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received unexpected event network-vif-plugged-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc for instance with vm_state active and task_state None. [ 1012.770158] env[62914]: DEBUG nova.compute.manager [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-changed-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1012.770306] env[62914]: DEBUG nova.compute.manager [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing instance network info cache due to event network-changed-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1012.770478] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.782214] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1012.785436] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ab5fd141-84e2-498d-b291-ffab7fcbe80b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.794908] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1012.794908] env[62914]: value = "task-4832390" [ 1012.794908] env[62914]: _type = "Task" [ 1012.794908] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.805725] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.851650] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832388, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.901101] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.902109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.902311] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.902981] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.903303] env[62914]: DEBUG nova.network.neutron [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Refreshing network info cache for port b3e109fe-6c2f-407e-97fd-39b74b3bc4bc {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1012.905674] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecf6654-4f13-43bc-85a4-f4ca26d7590e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.926022] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1012.926431] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1012.926623] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 
tempest-AttachInterfacesTestJSON-714396526-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.926873] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1012.927104] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.927321] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1012.927568] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1012.927825] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1012.928134] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1012.928581] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1012.928865] env[62914]: DEBUG nova.virt.hardware [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.935546] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Reconfiguring VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1012.938257] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed71b87f-9224-40a0-859a-061cd9f36f99 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.951269] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2283688-01b1-46b4-bc87-75ec62afb5fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.956925] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1012.957231] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1012.957446] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.957613] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1012.957809] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.957975] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1012.958196] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1012.958361] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1012.958531] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1012.958695] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1012.959012] env[62914]: DEBUG nova.virt.hardware [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.964480] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1012.965188] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-989d8562-8743-42ac-99be-cdb9f81e51a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.988430] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9430b30-cc30-43c5-960b-9ce9608606fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.992242] env[62914]: DEBUG oslo_vmware.api [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 1012.992242] env[62914]: value = "task-4832391" [ 1012.992242] env[62914]: _type = "Task" [ 1012.992242] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.994313] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1012.994313] env[62914]: value = "task-4832392" [ 1012.994313] env[62914]: _type = "Task" [ 1012.994313] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.030544] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c44ea52-156c-41ce-bb7b-78b2778af6d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.040133] env[62914]: DEBUG oslo_vmware.api [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832391, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.040565] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832392, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.046408] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cf73f6-5d69-4c7f-b358-2f0be45a84f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.062605] env[62914]: DEBUG nova.compute.provider_tree [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.307186] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.351255] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832388, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.511597] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832392, 'name': ReconfigVM_Task, 'duration_secs': 0.240134} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.511976] env[62914]: DEBUG oslo_vmware.api [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832391, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.514360] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1013.515241] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d79febd-e623-4d12-b39d-e9dd968229b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.541801] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.544702] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71cdbbea-fd5c-4560-9836-8e2ce4e2ab30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.566086] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1013.566086] env[62914]: value = "task-4832393" [ 1013.566086] env[62914]: _type = "Task" [ 1013.566086] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.567185] env[62914]: DEBUG nova.scheduler.client.report [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1013.580467] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832393, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.668813] env[62914]: DEBUG nova.network.neutron [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updated VIF entry in instance network info cache for port b3e109fe-6c2f-407e-97fd-39b74b3bc4bc. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1013.669436] env[62914]: DEBUG nova.network.neutron [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "address": "fa:16:3e:a6:45:00", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e109fe-6c", "ovs_interfaceid": "b3e109fe-6c2f-407e-97fd-39b74b3bc4bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.810931] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.854437] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832388, 'name': CreateSnapshot_Task, 'duration_secs': 1.542468} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.854437] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1013.854437] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faf6240-e145-4b71-ae34-7ce4f2903e1f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.894414] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f89e147-f6a6-4a9e-95d8-fdeb2aaa09bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.913879] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1014.005513] env[62914]: DEBUG oslo_vmware.api [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832391, 'name': ReconfigVM_Task, 'duration_secs': 0.770325} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.005835] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.006080] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Reconfigured VM to attach interface {{(pid=62914) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1014.076525] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.077113] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1014.079947] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832393, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.080478] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.041s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.080711] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.083030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.808s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.084289] env[62914]: INFO nova.compute.claims [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.113532] env[62914]: INFO nova.scheduler.client.report [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted allocations for instance dac99ed2-aed9-4c3e-bcab-a8de9967990c [ 1014.172567] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bb6593c-d0fa-46fd-8a74-f90b2dbbb224 req-08963329-6a6d-4ea4-97b1-9f6a67036f83 service nova] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.307879] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.374935] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1014.375341] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-239ec2da-f3e6-4dbc-ac99-471705e570fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.384748] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1014.384748] env[62914]: value = "task-4832394" [ 1014.384748] env[62914]: _type = "Task" [ 1014.384748] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.394412] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.420693] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1014.420919] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30b4d378-f6a4-40bd-9c0a-96232938ecb6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.428983] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1014.428983] env[62914]: value = "task-4832395" [ 1014.428983] env[62914]: _type = "Task" [ 1014.428983] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.438476] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832395, 'name': PowerOffVM_Task} progress is 0%. 
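The CloneVM_Task and PowerOffVM_Task exchanges above all follow the same oslo.vmware shape: invoke a vSphere method through the session, get back a task reference, then poll it until it completes. A minimal sketch, assuming an already-built VMwareAPISession and a VM managed-object reference obtained elsewhere (host and credentials below are placeholders):

```python
# Sketch of the invoke/poll pattern behind "Invoking VirtualMachine.
# PowerOffVM_Task ... Waiting for the task ... progress is N% ...
# completed successfully". Host and credentials are placeholders.
from oslo_vmware import api as vmware_api


def build_session():
    return vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)


def power_off(session, vm_ref):
    # invoke_api returns the Task managed-object reference immediately.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task polls every task_poll_interval seconds, logging the
    # progress percentages seen above, until the task succeeds or raises.
    session.wait_for_task(task)
```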
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.511439] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2315b673-bf86-4322-a677-74ad062435e5 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-fa33e1a5-677a-489c-8c89-a33066b18103-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.683s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.579937] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832393, 'name': ReconfigVM_Task, 'duration_secs': 0.51796} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.580439] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.580938] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1014.586523] env[62914]: DEBUG nova.compute.utils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1014.588129] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Allocating IP information in the background. 
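The "Using /dev/sd instead of None" line above is get_next_device_name falling back to the /dev/sd prefix because the request did not name a device, after which the next free letter is chosen. A purely illustrative helper showing that idea (not Nova's implementation):

```python
import string


def next_device_name(used_names, prefix='/dev/sd'):
    """Illustrative stand-in for the 'Using /dev/sd instead of None'
    step: return the first unused /dev/sdX name."""
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used_names:
            return candidate
    raise ValueError('no free device names under %s' % prefix)


# next_device_name({'/dev/sda', '/dev/sdb'}) -> '/dev/sdc'
```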
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1014.588257] env[62914]: DEBUG nova.network.neutron [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1014.622113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f7d1b0e7-ebfb-47b1-ab6b-030e4a26f639 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "dac99ed2-aed9-4c3e-bcab-a8de9967990c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.129s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.649300] env[62914]: DEBUG nova.policy [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f5f245cdbbb48f3a6a46981fad0a139', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd77829ac81cd41f2a4acdd571295ca6d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1014.809809] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.898303] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.939939] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832395, 'name': PowerOffVM_Task, 'duration_secs': 0.406276} completed successfully. 
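The DEBUG "Policy check for network:attach_external_network failed with credentials {...}" above is the expected outcome for a member/reader token: the rule is evaluated as a soft check and simply returns False, so the build carries on without external-network attach rights. A minimal sketch of the same kind of soft check with oslo.policy (only the rule name comes from the log; the default rule string and credentials are illustrative):

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed default: only admin contexts may attach external networks.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network',
                       'rule:context_is_admin'))

creds = {'roles': ['member', 'reader'], 'is_admin': False,
         'project_id': 'd77829ac81cd41f2a4acdd571295ca6d'}

# do_raise=False mirrors the soft check in the log: a boolean comes back
# instead of PolicyNotAuthorized being raised.
allowed = enforcer.enforce('network:attach_external_network',
                           target={}, creds=creds, do_raise=False)
print(allowed)  # False for a non-admin token
```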
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.940281] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1014.940481] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1015.022656] env[62914]: DEBUG nova.network.neutron [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Successfully created port: f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1015.090714] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b720a103-b6dd-4987-a6ff-45c94b93a9e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.095337] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1015.133490] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a53aa8e-06fd-441b-979c-6055d972e789 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.159707] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1015.315215] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.398665] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.448783] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1015.449163] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1015.449350] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1015.449652] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1015.449831] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1015.450091] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1015.450341] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1015.450870] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1015.451085] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 
tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1015.451269] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1015.451743] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1015.459519] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf7dcbe4-9ffc-44fc-bccc-348378d60fbe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.476880] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1015.476880] env[62914]: value = "task-4832396" [ 1015.476880] env[62914]: _type = "Task" [ 1015.476880] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.491917] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832396, 'name': ReconfigVM_Task} progress is 6%. 
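The topology walk above (limits 0:0:0 meaning unconstrained, "Build topologies for 1 vcpu(s) 1:1:1", a single possible VirtCPUTopology(cores=1,sockets=1,threads=1)) is Nova enumerating every sockets x cores x threads factorization of the vCPU count that fits the flavor and image limits, then sorting by preference. An illustrative re-implementation of just the enumeration step (not Nova's code):

```python
from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Every (sockets, cores, threads) whose product equals vcpus and
    respects the per-dimension limits; for 1 vCPU the only answer is
    (1, 1, 1), matching the log above."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(Topology(sockets, cores, threads))
    return topologies


print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
print(possible_topologies(4))  # (1,1,4), (1,2,2), (1,4,1), (2,1,2), ...
```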
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.498906] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1645e6fa-b16f-4861-ac9b-f9d332ea3523 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.507863] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5defa7e8-f7eb-4041-a865-2d5426ead0b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.541374] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7812f9-da94-4daf-8ddf-6a22c54dfaa5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.549240] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b50b98-e338-40ca-a7a6-8fd2c78c568e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.568457] env[62914]: DEBUG nova.compute.provider_tree [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.813429] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.897028] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.991547] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832396, 'name': ReconfigVM_Task, 'duration_secs': 0.208735} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.992163] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1016.074430] env[62914]: DEBUG nova.scheduler.client.report [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.113432] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 
tempest-ServerRescueNegativeTestJSON-298244947-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1016.145772] env[62914]: DEBUG nova.virt.hardware [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1016.146422] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796883fa-72cc-4e0b-b8d5-6a24851786f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.156595] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d95aac2-fcc6-42c9-b4d2-a61909306b07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.315491] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832390, 'name': CloneVM_Task, 'duration_secs': 3.422506} completed successfully. 
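The inventory payload reported a few lines up ('VCPU': total 48, allocation_ratio 4.0; 'MEMORY_MB': total 196590, reserved 512; 'DISK_GB': total 200) is what the resource tracker hands to Placement, where the usable capacity of each class works out to (total - reserved) * allocation_ratio. A quick worked check of those figures (the helper is hypothetical; only the arithmetic matters):

```python
def schedulable(total, reserved, allocation_ratio):
    """Effective capacity Placement will allow allocations against."""
    return (total - reserved) * allocation_ratio


inventory = {
    'VCPU':      dict(total=48,     reserved=0,   allocation_ratio=4.0),
    'MEMORY_MB': dict(total=196590, reserved=512, allocation_ratio=1.0),
    'DISK_GB':   dict(total=200,    reserved=0,   allocation_ratio=1.0),
}

for rc, inv in inventory.items():
    print(rc, schedulable(**inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
```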
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.316231] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Created linked-clone VM from snapshot [ 1016.317061] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8d2232-7670-4d3a-ae81-cfb3d65f09ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.326580] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Uploading image 4d62b4b6-d832-4dbd-be0d-027df1fdaff5 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1016.356260] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1016.356260] env[62914]: value = "vm-942032" [ 1016.356260] env[62914]: _type = "VirtualMachine" [ 1016.356260] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1016.357338] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d39815ed-6cc3-4c8e-86fa-9452c3add228 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.367215] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lease: (returnval){ [ 1016.367215] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ee429-c21c-f2f8-cdd9-a92f6f5a61a5" [ 1016.367215] env[62914]: _type = "HttpNfcLease" [ 1016.367215] env[62914]: } obtained for exporting VM: (result){ [ 1016.367215] env[62914]: value = "vm-942032" [ 1016.367215] env[62914]: _type = "VirtualMachine" [ 1016.367215] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1016.367890] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the lease: (returnval){ [ 1016.367890] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ee429-c21c-f2f8-cdd9-a92f6f5a61a5" [ 1016.367890] env[62914]: _type = "HttpNfcLease" [ 1016.367890] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1016.375041] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1016.375041] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ee429-c21c-f2f8-cdd9-a92f6f5a61a5" [ 1016.375041] env[62914]: _type = "HttpNfcLease" [ 1016.375041] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1016.401798] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.501243] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1016.501579] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1016.501809] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.502036] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1016.502250] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.502423] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1016.502651] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1016.502791] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 
tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1016.502963] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1016.503142] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1016.503320] env[62914]: DEBUG nova.virt.hardware [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1016.513133] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1016.513483] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aebe63b3-84eb-48f5-9868-b59af44d0e04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.529184] env[62914]: DEBUG nova.compute.manager [req-2dbf6953-7020-4ad6-ac09-0c2f0ba64d77 req-372704aa-cae0-4a95-97e8-98f8a94ca5d6 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Received event network-vif-plugged-f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1016.529465] env[62914]: DEBUG oslo_concurrency.lockutils [req-2dbf6953-7020-4ad6-ac09-0c2f0ba64d77 req-372704aa-cae0-4a95-97e8-98f8a94ca5d6 service nova] Acquiring lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.529699] env[62914]: DEBUG oslo_concurrency.lockutils [req-2dbf6953-7020-4ad6-ac09-0c2f0ba64d77 req-372704aa-cae0-4a95-97e8-98f8a94ca5d6 service nova] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.529899] env[62914]: DEBUG oslo_concurrency.lockutils [req-2dbf6953-7020-4ad6-ac09-0c2f0ba64d77 req-372704aa-cae0-4a95-97e8-98f8a94ca5d6 service nova] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.530099] env[62914]: DEBUG nova.compute.manager 
[req-2dbf6953-7020-4ad6-ac09-0c2f0ba64d77 req-372704aa-cae0-4a95-97e8-98f8a94ca5d6 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] No waiting events found dispatching network-vif-plugged-f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1016.530286] env[62914]: WARNING nova.compute.manager [req-2dbf6953-7020-4ad6-ac09-0c2f0ba64d77 req-372704aa-cae0-4a95-97e8-98f8a94ca5d6 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Received unexpected event network-vif-plugged-f752f060-cdfa-4b16-904d-9263dfa26442 for instance with vm_state building and task_state spawning. [ 1016.537161] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "interface-fa33e1a5-677a-489c-8c89-a33066b18103-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.537421] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-fa33e1a5-677a-489c-8c89-a33066b18103-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.538785] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1016.538785] env[62914]: value = "task-4832398" [ 1016.538785] env[62914]: _type = "Task" [ 1016.538785] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.549036] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832398, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.579987] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.580593] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Start building networks asynchronously for instance. 
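The event sequence just above (network-vif-plugged received, "No waiting events found dispatching ...", then "Received unexpected event ... for instance with vm_state building and task_state spawning") is the compute manager's external-event table at work: Neutron reports the VIF plug, but nothing has registered a wait for that event yet because the instance is still spawning, so the event is logged and dropped. A toy illustration of that registry pattern (not Nova's actual classes):

```python
import threading


class EventRegistry:
    """Toy model of the 'waiting events' table behind the
    'No waiting events found dispatching ...' message."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev  # caller later blocks on ev.wait(timeout=...)

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('unexpected event %s for %s' % (event_name, instance_uuid))
        else:
            ev.set()
```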
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1016.583670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.877s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.584029] env[62914]: DEBUG nova.objects.instance [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lazy-loading 'resources' on Instance uuid d9476d24-fbc5-4e30-bf67-85c388e943fd {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.875024] env[62914]: DEBUG nova.network.neutron [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Port 94d0e4cd-493e-4e41-89dc-b0636889e9d9 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1016.877853] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1016.877853] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ee429-c21c-f2f8-cdd9-a92f6f5a61a5" [ 1016.877853] env[62914]: _type = "HttpNfcLease" [ 1016.877853] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1016.878351] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1016.878351] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ee429-c21c-f2f8-cdd9-a92f6f5a61a5" [ 1016.878351] env[62914]: _type = "HttpNfcLease" [ 1016.878351] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1016.879100] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ce837b-0043-4dfa-a822-16f5fb2270bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.889625] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52489de9-1884-0263-183a-6645458721ea/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1016.889751] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52489de9-1884-0263-183a-6645458721ea/disk-0.vmdk for reading. 
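The export above (HttpNfcLease created via ExportVm, polled from "initializing" to "ready", VMDK URL pulled from the lease info, then opened for reading) is how the linked-clone's disk gets streamed out for the image upload. A compressed sketch of that flow with oslo.vmware, assuming an existing session and vm_ref; taking the first deviceUrl entry is a simplification of the real .vmdk lookup:

```python
from oslo_vmware import vim_util


def open_export_lease(session, vm_ref):
    """Mirror of the lease-create / wait / find-VMDK-URL steps above."""
    # ExportVm returns an HttpNfcLease managed-object reference.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Block until the lease leaves the "initializing" state.
    session.wait_for_lease_ready(lease)

    # The lease info carries per-device deviceUrl entries holding the
    # https://<esx-host>/nfc/.../disk-0.vmdk URLs seen in the log.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    return lease, lease_info.deviceUrl[0].url
```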
{{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1016.957733] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task} progress is 95%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.026686] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-97b114b7-0475-4ddb-9f02-8d151be4899f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.040728] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.040959] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.042174] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f76e74f-6317-495a-ae3a-3d8d51f6063a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.057399] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832398, 'name': ReconfigVM_Task, 'duration_secs': 0.18946} completed successfully. 
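The ReconfigVM_Task that just completed (detaching disk 2000, with the re-attach that follows) is driven by a VirtualMachineConfigSpec whose deviceChange list names the operation per device. A minimal sketch of building and submitting such a spec through the session's SOAP client factory; locating the VirtualDisk object itself is omitted, and everything outside the standard vSphere type names is illustrative:

```python
def detach_disk(session, vm_ref, disk_device):
    """Build a remove-device spec and reconfigure the VM, as in the
    'Reconfiguring VM instance ... to detach disk 2000' step above."""
    factory = session.vim.client.factory

    spec = factory.create('ns0:VirtualMachineConfigSpec')
    device_change = factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'
    device_change.device = disk_device  # VirtualDisk located elsewhere
    spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=spec)
    session.wait_for_task(task)
```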
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.074430] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1017.075682] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5152689f-ad10-4201-b11b-61fa5ebc458d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.078905] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53e3785-f6bc-4474-8d4b-712c1ed0e793 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.091754] env[62914]: DEBUG nova.compute.utils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1017.097956] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1017.098170] env[62914]: DEBUG nova.network.neutron [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1017.136157] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7/120fa16e-60cd-4326-b6c4-f1df419dbcb7.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.141982] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Reconfiguring VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1017.146518] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a3538ba-0ed6-4db8-930f-6736c6079304 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.159921] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e818fdd0-b8eb-46e6-9ad2-9c261be09953 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.175699] env[62914]: DEBUG nova.network.neutron [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Successfully updated port: f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1017.183782] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1017.183782] env[62914]: value = "task-4832400" [ 1017.183782] env[62914]: _type = "Task" [ 1017.183782] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.185420] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 1017.185420] env[62914]: value = "task-4832399" [ 1017.185420] env[62914]: _type = "Task" [ 1017.185420] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.202724] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832400, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.206579] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.228972] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.229938] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.243238] env[62914]: DEBUG nova.policy [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f5f245cdbbb48f3a6a46981fad0a139', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd77829ac81cd41f2a4acdd571295ca6d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1017.402894] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832394, 'name': CloneVM_Task, 'duration_secs': 2.782688} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.404066] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Created linked-clone VM from snapshot [ 1017.405101] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d3f646-728f-41c6-8d02-6f057e66c0a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.420913] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Uploading image d1319938-0b9a-4244-87be-3d610f37b34c {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1017.437267] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1017.437584] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b407aee6-62e4-4893-bcbd-d5a066906a3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.450879] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1017.450879] env[62914]: value = "task-4832401" [ 1017.450879] env[62914]: _type = "Task" [ 1017.450879] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.464937] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832401, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.531103] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671c7f31-24c7-4002-a8d3-beb2df7b8e6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.539756] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c8a0f9-8176-4fc0-9478-0552a2af347c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.574638] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7566bd0-c0d3-41eb-ae27-037f947cedb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.587277] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8bde77-c1bd-4173-a278-9718634ae379 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.594108] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.594502] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.594843] env[62914]: INFO nova.compute.manager [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Shelving [ 1017.608670] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1017.610239] env[62914]: DEBUG nova.compute.provider_tree [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.620054] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1017.620054] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c38024e-8c1d-4262-82cb-905d159ebad8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.628810] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1017.628810] env[62914]: value = "task-4832402" [ 1017.628810] env[62914]: _type = "Task" [ 1017.628810] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.641705] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.679777] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.680453] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.680556] env[62914]: DEBUG nova.network.neutron [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1017.709063] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832400, 'name': ReconfigVM_Task, 'duration_secs': 0.347848} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.713281] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7/120fa16e-60cd-4326-b6c4-f1df419dbcb7.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.713862] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1017.718366] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.734125] env[62914]: INFO nova.compute.manager [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Detaching volume 37c44301-e13e-475c-b93b-3d45c6886107 [ 1017.778819] env[62914]: DEBUG nova.compute.manager [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Received event network-changed-f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1017.779086] env[62914]: DEBUG nova.compute.manager [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Refreshing instance network info cache due to event network-changed-f752f060-cdfa-4b16-904d-9263dfa26442. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1017.779820] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] Acquiring lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.794854] env[62914]: INFO nova.virt.block_device [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Attempting to driver detach volume 37c44301-e13e-475c-b93b-3d45c6886107 from mountpoint /dev/sdb [ 1017.795863] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Volume detach. 
Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1017.795863] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941994', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'name': 'volume-37c44301-e13e-475c-b93b-3d45c6886107', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'dc99b470-4334-408d-8853-d2e9b9204d04', 'attached_at': '', 'detached_at': '', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'serial': '37c44301-e13e-475c-b93b-3d45c6886107'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1017.796742] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5398a4c4-c88c-4ebf-8d03-b334d689561a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.844443] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdcd55a-0614-41a2-a0a6-4c15d4357e7d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.856870] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06182921-99fe-45d8-9dc1-ec48a9ac7868 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.902900] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b51e386-4d0e-4f07-a03f-01d65bd3c735 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.910447] env[62914]: DEBUG nova.network.neutron [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Successfully created port: bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.918613] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.918963] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.919693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 
tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.936839] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] The volume has not been displaced from its original location: [datastore1] volume-37c44301-e13e-475c-b93b-3d45c6886107/volume-37c44301-e13e-475c-b93b-3d45c6886107.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1017.942563] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfiguring VM instance instance-00000038 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1017.944901] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dd1fbb8-658f-4fd9-b80d-1c270b96b01a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.974523] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832401, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.976506] env[62914]: DEBUG oslo_vmware.api [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1017.976506] env[62914]: value = "task-4832403" [ 1017.976506] env[62914]: _type = "Task" [ 1017.976506] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.989363] env[62914]: DEBUG oslo_vmware.api [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832403, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.117467] env[62914]: DEBUG nova.scheduler.client.report [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1018.147106] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832402, 'name': PowerOffVM_Task, 'duration_secs': 0.245149} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.149173] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1018.151019] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c86f2f4-1935-4424-b77a-de2ae594117e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.191016] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3055cc68-c0d1-40f3-9d91-cae85499ba1a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.207683] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.221555] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ab9216-8759-4067-98f9-861c8f3235c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.243203] env[62914]: DEBUG nova.network.neutron [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1018.246270] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee022867-e3a4-407f-b04f-bf32b8bff4bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.275595] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1018.435865] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.436263] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.447526] env[62914]: DEBUG nova.network.neutron [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updating instance_info_cache with network_info: [{"id": "f752f060-cdfa-4b16-904d-9263dfa26442", "address": "fa:16:3e:df:33:35", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752f060-cd", "ovs_interfaceid": "f752f060-cdfa-4b16-904d-9263dfa26442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.479672] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832401, 'name': Destroy_Task, 
'duration_secs': 0.71885} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.484821] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Destroyed the VM [ 1018.486338] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1018.486770] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4c96268f-b770-4875-b5ef-82a8b8f9bb3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.498406] env[62914]: DEBUG oslo_vmware.api [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832403, 'name': ReconfigVM_Task, 'duration_secs': 0.4262} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.500381] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Reconfigured VM instance instance-00000038 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1018.508830] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1018.508830] env[62914]: value = "task-4832404" [ 1018.508830] env[62914]: _type = "Task" [ 1018.508830] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.508830] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c5390bc-acca-407d-8acc-6f70edd40eba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.532631] env[62914]: DEBUG oslo_vmware.api [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1018.532631] env[62914]: value = "task-4832405" [ 1018.532631] env[62914]: _type = "Task" [ 1018.532631] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.532952] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832404, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.543051] env[62914]: DEBUG oslo_vmware.api [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832405, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.623778] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1018.626841] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.630634] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.629s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.633061] env[62914]: INFO nova.compute.claims [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.649084] env[62914]: INFO nova.scheduler.client.report [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted allocations for instance d9476d24-fbc5-4e30-bf67-85c388e943fd [ 1018.668515] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.668859] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 
tempest-ServerRescueNegativeTestJSON-298244947-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.669093] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.669362] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.669600] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.669860] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1018.670046] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.670224] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.670400] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.670626] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.670830] env[62914]: DEBUG nova.virt.hardware [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.672018] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0b2841-1bf8-4ed7-9257-614c818915ef {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.682171] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b415a2-e6ae-4b81-84be-1b5464681517 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.713448] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1018.713880] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.714230] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e4bd2fff-b2d4-439f-b7fd-4c133ae5995a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.728555] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1018.728555] env[62914]: value = "task-4832406" [ 1018.728555] env[62914]: _type = "Task" [ 1018.728555] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.739192] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832406, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.827878] env[62914]: DEBUG nova.network.neutron [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Port 2295762d-8e27-469d-a292-9ef453b210d6 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1018.940571] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1018.952330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.952330] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Instance network_info: |[{"id": "f752f060-cdfa-4b16-904d-9263dfa26442", "address": "fa:16:3e:df:33:35", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752f060-cd", "ovs_interfaceid": "f752f060-cdfa-4b16-904d-9263dfa26442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1018.952963] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] Acquired lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.953361] env[62914]: DEBUG nova.network.neutron [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Refreshing network info cache for port f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1018.955448] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:33:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f752f060-cdfa-4b16-904d-9263dfa26442', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.963395] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 
tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating folder: Project (d77829ac81cd41f2a4acdd571295ca6d). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1018.964736] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1a511a6-c585-4262-836d-204dc7aa2850 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.968248] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.968500] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.968751] env[62914]: DEBUG nova.network.neutron [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1018.981111] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created folder: Project (d77829ac81cd41f2a4acdd571295ca6d) in parent group-v941773. [ 1018.981831] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating folder: Instances. Parent ref: group-v942035. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1018.981831] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2326c14-1acf-42e5-b1d3-f44d798d096f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.993496] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created folder: Instances in parent group-v942035. [ 1018.993821] env[62914]: DEBUG oslo.service.loopingcall [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.994045] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1018.994283] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50fc035c-4786-4bb5-bfbe-f25cc1f2e3d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.018263] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1019.018263] env[62914]: value = "task-4832409" [ 1019.018263] env[62914]: _type = "Task" [ 1019.018263] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.037994] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832409, 'name': CreateVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.045665] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832404, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.053513] env[62914]: DEBUG oslo_vmware.api [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832405, 'name': ReconfigVM_Task, 'duration_secs': 0.217176} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.053980] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-941994', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'name': 'volume-37c44301-e13e-475c-b93b-3d45c6886107', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'dc99b470-4334-408d-8853-d2e9b9204d04', 'attached_at': '', 'detached_at': '', 'volume_id': '37c44301-e13e-475c-b93b-3d45c6886107', 'serial': '37c44301-e13e-475c-b93b-3d45c6886107'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1019.157475] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76c8d719-e253-4134-9a91-92c8ee88fab0 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "d9476d24-fbc5-4e30-bf67-85c388e943fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.685s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.213874] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.241071] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832406, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.468662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.537739] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832409, 'name': CreateVM_Task, 'duration_secs': 0.48145} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.541886] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1019.542378] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "55192659-4d65-4e74-a47f-46d650b6b095" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.542718] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "55192659-4d65-4e74-a47f-46d650b6b095" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.542979] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "55192659-4d65-4e74-a47f-46d650b6b095-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.543198] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "55192659-4d65-4e74-a47f-46d650b6b095-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.543393] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "55192659-4d65-4e74-a47f-46d650b6b095-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.545176] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832404, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.545732] env[62914]: INFO nova.compute.manager [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Terminating instance [ 1019.547659] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.547850] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.548263] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.548936] env[62914]: DEBUG nova.compute.manager [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1019.549175] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1019.549460] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3e1e79a-eb71-41b0-ad71-8c065d417c3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.551966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af0362d-031f-4fbe-a62e-5e4fbaf465cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.559349] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1019.559349] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52401eac-e3ce-6262-56eb-094d929705f0" [ 1019.559349] env[62914]: _type = "Task" [ 1019.559349] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.562493] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1019.566540] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32425b85-1710-4aa7-94ce-388189b5b80a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.575682] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52401eac-e3ce-6262-56eb-094d929705f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.580135] env[62914]: DEBUG oslo_vmware.api [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 1019.580135] env[62914]: value = "task-4832410" [ 1019.580135] env[62914]: _type = "Task" [ 1019.580135] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.592207] env[62914]: DEBUG oslo_vmware.api [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832410, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.627348] env[62914]: DEBUG nova.objects.instance [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'flavor' on Instance uuid dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.725555] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.743807] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832406, 'name': CreateSnapshot_Task, 'duration_secs': 0.788702} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.746960] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1019.748772] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941c2e5d-78ca-433b-be86-c4db597c5dcd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.861779] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.862124] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.862356] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.949701] env[62914]: DEBUG nova.compute.manager [req-3c896c58-7627-4ab2-b3d5-20b849b23b85 req-7491a7cb-a8b0-41d8-a1d3-d1fce5173ae4 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Received event 
network-vif-plugged-bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1019.949701] env[62914]: DEBUG oslo_concurrency.lockutils [req-3c896c58-7627-4ab2-b3d5-20b849b23b85 req-7491a7cb-a8b0-41d8-a1d3-d1fce5173ae4 service nova] Acquiring lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.949701] env[62914]: DEBUG oslo_concurrency.lockutils [req-3c896c58-7627-4ab2-b3d5-20b849b23b85 req-7491a7cb-a8b0-41d8-a1d3-d1fce5173ae4 service nova] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.949701] env[62914]: DEBUG oslo_concurrency.lockutils [req-3c896c58-7627-4ab2-b3d5-20b849b23b85 req-7491a7cb-a8b0-41d8-a1d3-d1fce5173ae4 service nova] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.950775] env[62914]: DEBUG nova.compute.manager [req-3c896c58-7627-4ab2-b3d5-20b849b23b85 req-7491a7cb-a8b0-41d8-a1d3-d1fce5173ae4 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] No waiting events found dispatching network-vif-plugged-bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1019.951125] env[62914]: WARNING nova.compute.manager [req-3c896c58-7627-4ab2-b3d5-20b849b23b85 req-7491a7cb-a8b0-41d8-a1d3-d1fce5173ae4 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Received unexpected event network-vif-plugged-bd8a334c-ccd2-4d47-8194-494527e06ae9 for instance with vm_state building and task_state spawning. 
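The pair of records above reflects Nova's external-event handling for port bd8a334c-ccd2-4d47-8194-494527e06ae9: Neutron reported network-vif-plugged, the compute manager found no registered waiter for that event on the still-building instance, and therefore dispatched nothing and logged the WARNING. Below is a minimal, hypothetical sketch of that waiter pattern, for illustration only; SimpleInstanceEvents and handle_external_event are invented names under assumed semantics, not Nova's actual classes or functions.

    # Hypothetical sketch of the "waiting events" pattern the records above
    # reflect: a spawn path registers interest in an event such as
    # "network-vif-plugged-<port-id>" before triggering the action; the
    # external-event handler pops the matching waiter and signals it, or
    # treats the event as unexpected when nobody is waiting.
    import threading
    from collections import defaultdict

    class SimpleInstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            # instance_uuid -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)

        def prepare(self, instance_uuid, event_name):
            """Register a waiter before starting the work that emits the event."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            """Remove and return the waiter for an incoming event, if any."""
            with self._lock:
                return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop(instance_uuid, event_name)
        if waiter is None:
            # Analogous to "No waiting events found dispatching ..." followed
            # by the "Received unexpected event ..." WARNING in the log above.
            print("unexpected event %s for instance %s" % (event_name, instance_uuid))
        else:
            waiter.set()

    # Usage: a builder thread would call prepare() before plugging the VIF,
    # then block on waiter.wait(timeout=...); the notification from Neutron
    # would arrive via handle_external_event().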
[ 1019.960142] env[62914]: DEBUG nova.network.neutron [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.964178] env[62914]: DEBUG nova.network.neutron [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updated VIF entry in instance network info cache for port f752f060-cdfa-4b16-904d-9263dfa26442. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1019.964697] env[62914]: DEBUG nova.network.neutron [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updating instance_info_cache with network_info: [{"id": "f752f060-cdfa-4b16-904d-9263dfa26442", "address": "fa:16:3e:df:33:35", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752f060-cd", "ovs_interfaceid": "f752f060-cdfa-4b16-904d-9263dfa26442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.045436] env[62914]: DEBUG oslo_vmware.api [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832404, 'name': RemoveSnapshot_Task, 'duration_secs': 1.284944} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.045436] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1020.076952] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52401eac-e3ce-6262-56eb-094d929705f0, 'name': SearchDatastore_Task, 'duration_secs': 0.014349} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.077318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.078040] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.078040] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.078040] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.078240] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.081141] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f8bf600-70e3-450e-9776-d64d5e065690 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.093758] env[62914]: DEBUG oslo_vmware.api [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832410, 'name': PowerOffVM_Task, 'duration_secs': 0.256228} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.096900] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1020.097126] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1020.098660] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9b782ed-b28f-4a19-961b-d6828782a3c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.101245] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.101425] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1020.102478] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4576654-047c-4c4f-b852-c84c62f34252 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.109352] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1020.109352] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52568e40-3abd-e586-2c9a-66a917762850" [ 1020.109352] env[62914]: _type = "Task" [ 1020.109352] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.111612] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0778cbe-4acc-49ca-acd6-aa088fea2dec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.124474] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52568e40-3abd-e586-2c9a-66a917762850, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.127451] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c6df82-a354-49cd-9aee-76a8883b6da0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.175069] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6757abff-98da-4919-9006-4979c6e5bbb7 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 2.945s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.177156] env[62914]: DEBUG nova.network.neutron [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Successfully updated port: bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.179373] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417e51a4-daf3-4981-9878-f69a5e5f78f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.193528] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec46888-d75e-4baa-882b-ea6bb14d2026 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.202083] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1020.202365] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1020.202626] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleting the datastore file [datastore2] 55192659-4d65-4e74-a47f-46d650b6b095 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.203470] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3d17975-dbb6-4adf-bfe1-14108284bb67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.215956] env[62914]: DEBUG nova.compute.provider_tree [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.228775] env[62914]: 
DEBUG oslo_vmware.api [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 1020.228775] env[62914]: value = "task-4832412" [ 1020.228775] env[62914]: _type = "Task" [ 1020.228775] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.229208] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.239176] env[62914]: DEBUG oslo_vmware.api [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.271459] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1020.272184] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c016af9-8d02-43d6-a9c6-28b61e4a1a7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.281530] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1020.281530] env[62914]: value = "task-4832413" [ 1020.281530] env[62914]: _type = "Task" [ 1020.281530] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.292743] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832413, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.461364] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.467883] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ec65d5a-c471-40de-868f-6ec2cee8958c req-ccfb36d1-95df-4d72-b8ca-ce93258f10f9 service nova] Releasing lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.551284] env[62914]: WARNING nova.compute.manager [None req-4cb73649-5226-419b-a495-eb96635c676e tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Image not found during snapshot: nova.exception.ImageNotFound: Image d1319938-0b9a-4244-87be-3d610f37b34c could not be found. [ 1020.633890] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52568e40-3abd-e586-2c9a-66a917762850, 'name': SearchDatastore_Task, 'duration_secs': 0.032489} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.636020] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7daae20-0c41-4d9e-94ce-18a83f04a5ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.645848] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1020.645848] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d5a8ec-37ed-8ace-01de-725ccb0b51d0" [ 1020.645848] env[62914]: _type = "Task" [ 1020.645848] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.662068] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d5a8ec-37ed-8ace-01de-725ccb0b51d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.685543] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.685802] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.686058] env[62914]: DEBUG nova.network.neutron [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1020.723126] env[62914]: DEBUG nova.scheduler.client.report [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.742213] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.751915] env[62914]: DEBUG oslo_vmware.api [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.384589} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.752307] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.752455] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1020.752677] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1020.752866] env[62914]: INFO nova.compute.manager [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1020.753135] env[62914]: DEBUG oslo.service.loopingcall [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.753358] env[62914]: DEBUG nova.compute.manager [-] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1020.753456] env[62914]: DEBUG nova.network.neutron [-] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1020.793305] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832413, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.903678] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.903886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.904699] env[62914]: DEBUG nova.network.neutron [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1020.972885] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6660b59-0342-4b64-b3b7-9b01fcaa4787 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.981446] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5602ed96-d211-4425-a6dd-ccb37003d980 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.026194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "79c7728a-0452-44ec-91de-62e3f09f9183" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.026194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "79c7728a-0452-44ec-91de-62e3f09f9183" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.026194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "79c7728a-0452-44ec-91de-62e3f09f9183-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.026194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "79c7728a-0452-44ec-91de-62e3f09f9183-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.026194] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "79c7728a-0452-44ec-91de-62e3f09f9183-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.029327] env[62914]: INFO nova.compute.manager [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Terminating instance [ 1021.031974] env[62914]: DEBUG nova.compute.manager [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1021.032703] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1021.033956] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3487b8-9230-401a-94d1-e050f9684cb9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.045199] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1021.045199] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5aab91b8-4615-4996-a44f-fa33ea5a4cd8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.052272] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1021.052272] env[62914]: value = "task-4832414" [ 1021.052272] env[62914]: _type = "Task" [ 1021.052272] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.066384] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832414, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.158355] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d5a8ec-37ed-8ace-01de-725ccb0b51d0, 'name': SearchDatastore_Task, 'duration_secs': 0.014871} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.158855] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.159011] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/bd973845-e7cf-4c5a-9a6b-3ae15ada9f64.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1021.159500] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-263ff3e0-7b17-4a04-9600-978fca400a83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.168137] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1021.168137] env[62914]: value = "task-4832415" [ 1021.168137] env[62914]: _type = "Task" [ 1021.168137] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.178898] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832415, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.219270] env[62914]: DEBUG nova.network.neutron [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1021.234041] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.234869] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1021.239374] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.239728] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.239906] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.240111] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.240325] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.242754] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.243437] env[62914]: INFO nova.compute.manager [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Terminating instance [ 1021.247752] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.923s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.247915] env[62914]: DEBUG nova.objects.instance [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1021.254815] env[62914]: DEBUG nova.compute.manager [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1021.254815] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1021.255284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570f7e4d-c4fe-4e37-8c45-7453d4bef18a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.265079] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1021.265401] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1a1a3f9-f118-41a0-84a3-d3042a8306e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.272789] env[62914]: DEBUG oslo_vmware.api [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1021.272789] env[62914]: value = "task-4832416" [ 1021.272789] env[62914]: _type = "Task" [ 1021.272789] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.286709] env[62914]: DEBUG oslo_vmware.api [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.299602] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832413, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.486637] env[62914]: DEBUG nova.network.neutron [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Updating instance_info_cache with network_info: [{"id": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "address": "fa:16:3e:c1:a5:65", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd8a334c-cc", "ovs_interfaceid": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.566772] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832414, 'name': PowerOffVM_Task, 'duration_secs': 0.333002} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.567104] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1021.567287] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1021.570992] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7943720-7140-4839-b56e-e9854aacda59 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.652060] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1021.652516] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1021.652738] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleting the datastore file [datastore1] 79c7728a-0452-44ec-91de-62e3f09f9183 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.652843] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee9aaa67-7da0-4eda-9659-cab79abfc06e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.662895] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for the task: (returnval){ [ 1021.662895] env[62914]: value = "task-4832418" [ 1021.662895] env[62914]: _type = "Task" [ 1021.662895] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.675860] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.683229] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832415, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.740481] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.749112] env[62914]: DEBUG nova.compute.utils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1021.750837] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1021.751513] env[62914]: DEBUG nova.network.neutron [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1021.787092] env[62914]: DEBUG oslo_vmware.api [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832416, 'name': PowerOffVM_Task, 'duration_secs': 0.304112} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.790768] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1021.790997] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1021.794908] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a01403a5-0fba-44f2-a9ed-65f29dac9afe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.802131] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832413, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.813652] env[62914]: DEBUG nova.policy [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1021.845990] env[62914]: DEBUG nova.network.neutron [-] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.915646] env[62914]: DEBUG nova.network.neutron [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [{"id": "2295762d-8e27-469d-a292-9ef453b210d6", "address": "fa:16:3e:65:01:15", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295762d-8e", "ovs_interfaceid": "2295762d-8e27-469d-a292-9ef453b210d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.985748] env[62914]: DEBUG nova.compute.manager [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Received event network-changed-bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1021.986010] env[62914]: DEBUG nova.compute.manager [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Refreshing instance network info cache due to event network-changed-bd8a334c-ccd2-4d47-8194-494527e06ae9. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1021.986580] env[62914]: DEBUG oslo_concurrency.lockutils [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] Acquiring lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.989537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.989883] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Instance network_info: |[{"id": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "address": "fa:16:3e:c1:a5:65", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd8a334c-cc", "ovs_interfaceid": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1021.990538] env[62914]: DEBUG oslo_concurrency.lockutils [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] Acquired lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.990731] env[62914]: DEBUG nova.network.neutron [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Refreshing network info cache for port bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1021.992199] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:a5:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'bd8a334c-ccd2-4d47-8194-494527e06ae9', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.000263] env[62914]: DEBUG oslo.service.loopingcall [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.001668] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1022.001923] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dae57cc6-c577-4fd5-8092-55452213cdd6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.026369] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.026369] env[62914]: value = "task-4832420" [ 1022.026369] env[62914]: _type = "Task" [ 1022.026369] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.041798] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832420, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.077365] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1022.077755] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1022.078049] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleting the datastore file [datastore1] dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.078335] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9c56eb6-7f85-4a56-8f5f-fc8ce1b670ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.086767] env[62914]: DEBUG oslo_vmware.api [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1022.086767] env[62914]: value = "task-4832421" [ 1022.086767] env[62914]: _type = "Task" [ 1022.086767] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.134842] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74785597-41a4-4df8-bad4-d76202fec98e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.138788] env[62914]: DEBUG nova.network.neutron [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Successfully created port: 87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.163309] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0bf430-1eac-4500-9a26-394c12ebe704 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.177422] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1022.186268] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.194122] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832415, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597158} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.194197] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/bd973845-e7cf-4c5a-9a6b-3ae15ada9f64.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1022.194556] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.194935] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e3de7d5-0b95-41fa-a14f-cfc6d4b9edc2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.204336] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1022.204336] env[62914]: value = "task-4832422" [ 1022.204336] env[62914]: _type = "Task" [ 1022.204336] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.215645] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832422, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.236723] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.258032] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1022.261166] env[62914]: DEBUG oslo_concurrency.lockutils [None req-eb0cab72-de2c-4b8e-a8ff-42382a7802a3 tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.262764] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.523s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.263335] env[62914]: DEBUG nova.objects.instance [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lazy-loading 'resources' on Instance uuid b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.300071] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832413, 'name': CloneVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.349015] env[62914]: INFO nova.compute.manager [-] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Took 1.60 seconds to deallocate network for instance. [ 1022.418797] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.541732] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832420, 'name': CreateVM_Task, 'duration_secs': 0.440372} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.541941] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1022.542732] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.542931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.543333] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1022.543547] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47c1bc6c-4654-4653-b2b2-f181239e69d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.549142] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1022.549142] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b2cf1-0078-edc2-e733-6df210ca23c4" [ 1022.549142] env[62914]: _type = "Task" [ 1022.549142] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.558386] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b2cf1-0078-edc2-e733-6df210ca23c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.597159] env[62914]: DEBUG oslo_vmware.api [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.503114} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.599910] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.600160] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1022.600373] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1022.600554] env[62914]: INFO nova.compute.manager [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1022.600799] env[62914]: DEBUG oslo.service.loopingcall [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.601016] env[62914]: DEBUG nova.compute.manager [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1022.601116] env[62914]: DEBUG nova.network.neutron [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1022.683860] env[62914]: DEBUG oslo_vmware.api [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Task: {'id': task-4832418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.53085} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.685700] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.685944] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1022.686210] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1022.686390] env[62914]: INFO nova.compute.manager [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1022.686618] env[62914]: DEBUG oslo.service.loopingcall [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.686826] env[62914]: DEBUG nova.compute.manager [-] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1022.686920] env[62914]: DEBUG nova.network.neutron [-] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1022.691733] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1022.692053] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d76fb87b-33be-43d4-aad5-0de4bf9ebc96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.702687] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1022.702687] env[62914]: value = "task-4832423" [ 1022.702687] env[62914]: _type = "Task" [ 1022.702687] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.718509] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832423, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.722573] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081013} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.723959] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1022.726243] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a5261d-5070-45d2-bea4-2f0fc76d899e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.738953] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.763132] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/bd973845-e7cf-4c5a-9a6b-3ae15ada9f64.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.769451] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75602be8-b8cc-4aa4-8955-d59da6d13464 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.805700] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832413, 'name': CloneVM_Task, 'duration_secs': 2.098989} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.807451] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Created linked-clone VM from snapshot [ 1022.808766] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1022.808766] env[62914]: value = "task-4832424" [ 1022.808766] env[62914]: _type = "Task" [ 1022.808766] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.808990] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d3d6d0-dfce-4172-bf5e-64c0ca433e34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.822611] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Uploading image 04d1e2dc-2bd1-433e-b7d2-80c799be344b {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1022.829256] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.857246] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.874394] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1022.874394] env[62914]: value = "vm-942039" [ 1022.874394] env[62914]: _type = "VirtualMachine" [ 1022.874394] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1022.875019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fd9d39f3-513c-4c43-aef6-c9718b8dce48 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.881901] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lease: (returnval){ [ 1022.881901] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e16714-d34f-8b31-d189-f1729fdcdf0e" [ 1022.881901] env[62914]: _type = "HttpNfcLease" [ 1022.881901] env[62914]: } obtained for exporting VM: (result){ [ 1022.881901] env[62914]: value = "vm-942039" [ 1022.881901] env[62914]: _type = "VirtualMachine" [ 1022.881901] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1022.882328] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the lease: (returnval){ [ 1022.882328] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e16714-d34f-8b31-d189-f1729fdcdf0e" [ 1022.882328] env[62914]: _type = "HttpNfcLease" [ 1022.882328] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1022.888980] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1022.888980] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e16714-d34f-8b31-d189-f1729fdcdf0e" [ 1022.888980] env[62914]: _type = "HttpNfcLease" [ 1022.888980] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1022.944983] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c7801c-93d6-4d47-b788-732f8521dcb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.975374] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5366c6b-c659-4c9c-9b94-7a509f0672f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.983723] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1023.062805] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b2cf1-0078-edc2-e733-6df210ca23c4, 'name': SearchDatastore_Task, 'duration_secs': 0.011888} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.063251] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.063448] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.063661] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.063810] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.063995] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.066860] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0148980d-c8bd-477b-9a60-cd15754b4dfb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.077805] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.078174] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1023.079182] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0bf1b58-3729-46e1-8b01-7e486a8193cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.091941] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1023.091941] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b4950c-ad7d-cf39-9b52-278870d11bfc" [ 1023.091941] env[62914]: _type = "Task" [ 1023.091941] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.101655] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b4950c-ad7d-cf39-9b52-278870d11bfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.164917] env[62914]: DEBUG nova.network.neutron [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Updated VIF entry in instance network info cache for port bd8a334c-ccd2-4d47-8194-494527e06ae9. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1023.165286] env[62914]: DEBUG nova.network.neutron [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Updating instance_info_cache with network_info: [{"id": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "address": "fa:16:3e:c1:a5:65", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd8a334c-cc", "ovs_interfaceid": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.214983] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832423, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.229604] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa1ebdd-83a3-4cda-b516-668b970e9ca5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.244655] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e25303e-18fd-4b88-8dbe-9be6c07cbdeb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.248163] env[62914]: DEBUG oslo_vmware.api [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832399, 'name': ReconfigVM_Task, 'duration_secs': 5.920815} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.248436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.248671] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Reconfigured VM to detach interface {{(pid=62914) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1023.283459] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e43eeb9-22cf-4548-9b00-570ceb31e11f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.293450] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63c9109-9c3c-41ad-be15-d6b9bbb09913 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.298489] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1023.314290] env[62914]: DEBUG nova.compute.provider_tree [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.326771] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832424, 'name': ReconfigVM_Task, 'duration_secs': 0.395035} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.327937] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Reconfigured VM instance instance-0000005f to attach disk [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/bd973845-e7cf-4c5a-9a6b-3ae15ada9f64.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.332255] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-113d3d95-5269-49a5-884d-a76834cf7af6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.338303] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1023.338303] env[62914]: value = "task-4832426" [ 1023.338303] env[62914]: _type = "Task" [ 1023.338303] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.344950] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1023.345237] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1023.345409] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.345606] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1023.345773] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.345934] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1023.346164] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1023.346365] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1023.346560] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1023.346995] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1023.346995] env[62914]: DEBUG nova.virt.hardware [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.348215] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00265f4f-6fa3-4ac9-8a09-a37a1d3c3bad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.361902] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6ad2cf-8504-45dd-9a49-0f686fee673e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.366582] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832426, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.395288] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1023.395288] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e16714-d34f-8b31-d189-f1729fdcdf0e" [ 1023.395288] env[62914]: _type = "HttpNfcLease" [ 1023.395288] env[62914]: } is ready. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1023.395288] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1023.395288] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e16714-d34f-8b31-d189-f1729fdcdf0e" [ 1023.395288] env[62914]: _type = "HttpNfcLease" [ 1023.395288] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1023.395288] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26b527c-0b9e-4c5d-82ac-fb3c6834a2e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.403480] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212eec4-ad69-82f7-d8ef-418dac482919/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1023.403782] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212eec4-ad69-82f7-d8ef-418dac482919/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1023.491261] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1023.491683] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d693fe8e-e9b2-4784-9b1e-a1feed685705 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.503408] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1023.503408] env[62914]: value = "task-4832427" [ 1023.503408] env[62914]: _type = "Task" [ 1023.503408] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.520827] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a2fee63b-de66-4607-914b-12e11f33b21a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.523120] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832427, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.569101] env[62914]: DEBUG nova.compute.manager [req-a0f4a563-b71b-4592-80c2-a46a7c6d222c req-06b94791-e0bd-4e86-8a5c-0464e3d4d3b3 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Received event network-vif-deleted-c3221de3-00d5-45e7-af68-04297360fbcf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1023.570833] env[62914]: INFO nova.compute.manager [req-a0f4a563-b71b-4592-80c2-a46a7c6d222c req-06b94791-e0bd-4e86-8a5c-0464e3d4d3b3 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Neutron deleted interface c3221de3-00d5-45e7-af68-04297360fbcf; detaching it from the instance and deleting it from the info cache [ 1023.570833] env[62914]: DEBUG nova.network.neutron [req-a0f4a563-b71b-4592-80c2-a46a7c6d222c req-06b94791-e0bd-4e86-8a5c-0464e3d4d3b3 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.604468] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b4950c-ad7d-cf39-9b52-278870d11bfc, 'name': SearchDatastore_Task, 'duration_secs': 0.012235} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.605156] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3a92a04-697b-4523-b5dd-347f34646dbd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.612486] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1023.612486] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52477f22-a162-efdb-0287-628944ffa732" [ 1023.612486] env[62914]: _type = "Task" [ 1023.612486] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.620927] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52477f22-a162-efdb-0287-628944ffa732, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.668142] env[62914]: DEBUG oslo_concurrency.lockutils [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] Releasing lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.668542] env[62914]: DEBUG nova.compute.manager [req-c88cabc9-2685-4439-b54f-a5bb779ae6ac req-fc840350-b348-4c04-962b-e93d4ddbd569 service nova] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Received event network-vif-deleted-4ca49936-a41c-4418-a42a-114ca4faa7c4 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1023.694179] env[62914]: DEBUG nova.network.neutron [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.714805] env[62914]: DEBUG oslo_vmware.api [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832423, 'name': PowerOnVM_Task, 'duration_secs': 0.903371} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.715765] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1023.715966] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5a14d0a3-1409-4852-945e-8e11b38f7ee4 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance 'ec73b924-e132-44b6-bc67-2b3c08592f03' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1023.820682] env[62914]: DEBUG nova.scheduler.client.report [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.858318] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832426, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.016274] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832427, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.016669] env[62914]: DEBUG nova.network.neutron [-] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.043146] env[62914]: DEBUG nova.compute.manager [req-cdfef9c9-fe5d-4aae-a75d-16c329e217ab req-fb1f8ba6-884f-4243-b848-6b024b9bd94f service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Received event network-vif-deleted-1559da30-bfec-4f82-9d1e-605294200ff3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1024.043510] env[62914]: INFO nova.compute.manager [req-cdfef9c9-fe5d-4aae-a75d-16c329e217ab req-fb1f8ba6-884f-4243-b848-6b024b9bd94f service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Neutron deleted interface 1559da30-bfec-4f82-9d1e-605294200ff3; detaching it from the instance and deleting it from the info cache [ 1024.043772] env[62914]: DEBUG nova.network.neutron [req-cdfef9c9-fe5d-4aae-a75d-16c329e217ab req-fb1f8ba6-884f-4243-b848-6b024b9bd94f service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.074076] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74fa433d-1b24-471a-abf9-e6719c6e3f2d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.086105] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a75457-5500-403e-a5a3-4a5be7c3dea1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.128676] env[62914]: DEBUG nova.compute.manager [req-a0f4a563-b71b-4592-80c2-a46a7c6d222c req-06b94791-e0bd-4e86-8a5c-0464e3d4d3b3 service nova] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Detach interface failed, port_id=c3221de3-00d5-45e7-af68-04297360fbcf, reason: Instance dc99b470-4334-408d-8853-d2e9b9204d04 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1024.139822] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52477f22-a162-efdb-0287-628944ffa732, 'name': SearchDatastore_Task, 'duration_secs': 0.012012} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.140479] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.140583] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/0c47848d-fcff-404d-8e84-e9fd09be9e9e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1024.140926] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c6d7637-75a2-4ecc-9ee7-ac682733ff23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.149568] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1024.149568] env[62914]: value = "task-4832428" [ 1024.149568] env[62914]: _type = "Task" [ 1024.149568] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.159660] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832428, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.200700] env[62914]: INFO nova.compute.manager [-] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Took 1.60 seconds to deallocate network for instance. 
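The entries above follow the oslo.vmware task pattern visible throughout this log: each vCenter operation suffixed with _Task (CopyVirtualDisk_Task, Rename_Task, PowerOnVM_Task) returns a task reference that wait_for_task then polls, emitting "progress is N%" lines until "completed successfully". As a rough, self-contained illustration of that polling loop only: get_task_state below is a stub standing in for the real vSphere task query, not an oslo.vmware API, and the interval/timeout values are arbitrary.

```python
import time

# Hypothetical stand-in for querying a vSphere task's state; the real driver
# asks vCenter for the Task managed object's info. The stub "finishes" on the
# third poll so the loop below can run on its own.
def get_task_state(task_id, _calls={"n": 0}):
    _calls["n"] += 1
    if _calls["n"] < 3:
        return {"state": "running", "progress": _calls["n"] * 33}
    return {"state": "success", "progress": 100}

def wait_for_task(task_id, poll_interval=0.5, timeout=60.0):
    """Poll a task until it finishes, mirroring the 'progress is N%' /
    'completed successfully' lines in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_state(task_id)
        if info["state"] == "success":
            print(f"Task {task_id} completed successfully.")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {info['progress']}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")

if __name__ == "__main__":
    wait_for_task("task-4832428")
```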
[ 1024.207216] env[62914]: DEBUG nova.network.neutron [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Successfully updated port: 87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.327334] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.064s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.331495] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.443s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.332163] env[62914]: DEBUG nova.objects.instance [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lazy-loading 'resources' on Instance uuid dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.359537] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832426, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.362716] env[62914]: INFO nova.scheduler.client.report [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleted allocations for instance b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d [ 1024.515193] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832427, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.519870] env[62914]: INFO nova.compute.manager [-] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Took 1.83 seconds to deallocate network for instance. 
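The 'Lock "compute_resources" acquired ... waited N s' and '"released" ... held N s' lines come from oslo_concurrency.lockutils serializing resource-tracker work (here ResourceTracker.update_usage) on a named lock and reporting how long each caller waited and held it. Below is a small stdlib-only sketch, not lockutils itself, that reproduces that waited/held reporting with a named threading.Lock; the timed_lock helper and its messages are illustrative.

```python
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name):
    """Acquire a named lock and report wait/hold times, in the same spirit
    as the lockutils 'waited N s' / 'held N s' lines above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" released :: held {held:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources"):
        time.sleep(0.1)  # stand-in for the resource-usage update work
```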
[ 1024.550278] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fee56404-4a32-4c75-a1a5-a527f633641d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.564718] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53779774-c50e-4090-b160-536b3e281a53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.622611] env[62914]: DEBUG nova.compute.manager [req-cdfef9c9-fe5d-4aae-a75d-16c329e217ab req-fb1f8ba6-884f-4243-b848-6b024b9bd94f service nova] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Detach interface failed, port_id=1559da30-bfec-4f82-9d1e-605294200ff3, reason: Instance 79c7728a-0452-44ec-91de-62e3f09f9183 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1024.662233] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832428, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.712657] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.714315] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-417d4287-0f76-4d2e-b1da-43455d7ff3e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.714729] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-417d4287-0f76-4d2e-b1da-43455d7ff3e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.715396] env[62914]: DEBUG nova.network.neutron [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1024.855027] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832426, 'name': Rename_Task, 'duration_secs': 1.036239} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.855686] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1024.856040] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba45e672-9618-4df2-bcbf-2ce4537de24d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.864855] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1024.864855] env[62914]: value = "task-4832429" [ 1024.864855] env[62914]: _type = "Task" [ 1024.864855] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.875591] env[62914]: DEBUG oslo_concurrency.lockutils [None req-66901d23-9337-47e8-975a-eec7610bd314 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.302s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.886401] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832429, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.018125] env[62914]: DEBUG oslo_vmware.api [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832427, 'name': PowerOnVM_Task, 'duration_secs': 1.482256} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.018488] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1025.018704] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e7881f0c-6a0a-4b1d-a77d-7cddd423f489 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance '120fa16e-60cd-4326-b6c4-f1df419dbcb7' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1025.030368] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.052811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.052811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquired lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.052811] env[62914]: DEBUG nova.network.neutron [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1025.163720] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68947} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.164491] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/0c47848d-fcff-404d-8e84-e9fd09be9e9e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1025.164983] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.201520] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14423bae-bf76-4c7e-9af6-8d2e414494e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.201520] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1025.201520] env[62914]: value = "task-4832430" [ 1025.201520] env[62914]: _type = "Task" [ 1025.201520] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.201520] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832430, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.247126] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.247350] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "fa33e1a5-677a-489c-8c89-a33066b18103" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.247662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.247880] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.248073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "fa33e1a5-677a-489c-8c89-a33066b18103-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.251747] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf3109f-854a-41df-ae55-dc3c2bd594f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.255221] env[62914]: INFO nova.compute.manager [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Terminating instance [ 1025.259195] env[62914]: DEBUG nova.compute.manager [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1025.259426] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1025.260349] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef9c6d8-282c-4437-ac89-f88a8b06d128 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.267178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b796ca-f8b3-4d71-b0ef-cdef6d227728 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.274187] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1025.275108] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec8ccdbf-19af-4472-9f13-6cb1f2559d07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.318117] env[62914]: DEBUG nova.network.neutron [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1025.322579] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b197893f-49f6-46fc-ae23-8be736b1166e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.326672] env[62914]: DEBUG oslo_vmware.api [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 1025.326672] env[62914]: value = "task-4832431" [ 1025.326672] env[62914]: _type = "Task" [ 1025.326672] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.339278] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb7f8c8-0da0-422d-8575-d759c0505a57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.351153] env[62914]: DEBUG oslo_vmware.api [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832431, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.366468] env[62914]: DEBUG nova.compute.provider_tree [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.379035] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832429, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.494858] env[62914]: DEBUG nova.network.neutron [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Updating instance_info_cache with network_info: [{"id": "87adb6e9-f00f-4164-8903-0a82d32416ca", "address": "fa:16:3e:5e:5c:f5", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87adb6e9-f0", "ovs_interfaceid": "87adb6e9-f00f-4164-8903-0a82d32416ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.691954] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078017} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.692675] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1025.694590] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5a5441-e50e-4266-b71d-14d80f146112 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.733513] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/0c47848d-fcff-404d-8e84-e9fd09be9e9e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.737268] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8138db45-2e55-44a3-b877-7849ba5f0821 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.772161] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1025.772161] env[62914]: value = "task-4832432" [ 1025.772161] env[62914]: _type = "Task" [ 1025.772161] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.784858] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832432, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.807650] env[62914]: DEBUG nova.compute.manager [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Received event network-vif-plugged-87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1025.811089] env[62914]: DEBUG oslo_concurrency.lockutils [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] Acquiring lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.811089] env[62914]: DEBUG oslo_concurrency.lockutils [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.811089] env[62914]: DEBUG oslo_concurrency.lockutils [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.811089] env[62914]: DEBUG nova.compute.manager [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] No waiting events found dispatching network-vif-plugged-87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1025.811089] env[62914]: WARNING nova.compute.manager [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Received unexpected event network-vif-plugged-87adb6e9-f00f-4164-8903-0a82d32416ca for instance with vm_state building and task_state spawning. [ 1025.811089] env[62914]: DEBUG nova.compute.manager [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Received event network-changed-87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1025.811089] env[62914]: DEBUG nova.compute.manager [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Refreshing instance network info cache due to event network-changed-87adb6e9-f00f-4164-8903-0a82d32416ca. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1025.811089] env[62914]: DEBUG oslo_concurrency.lockutils [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] Acquiring lock "refresh_cache-417d4287-0f76-4d2e-b1da-43455d7ff3e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.844187] env[62914]: DEBUG oslo_vmware.api [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832431, 'name': PowerOffVM_Task, 'duration_secs': 0.470459} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.844437] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1025.844635] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1025.848019] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63c63453-d595-4865-8f8a-6ad18463b166 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.870381] env[62914]: DEBUG nova.scheduler.client.report [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1025.884675] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832429, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.933395] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1025.933675] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1025.934014] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleting the datastore file [datastore1] fa33e1a5-677a-489c-8c89-a33066b18103 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.934400] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85b6260a-4315-42dc-b731-50f1ed352781 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.946655] env[62914]: DEBUG oslo_vmware.api [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 1025.946655] env[62914]: value = "task-4832434" [ 1025.946655] env[62914]: _type = "Task" [ 1025.946655] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.953942] env[62914]: INFO nova.network.neutron [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Port b3e109fe-6c2f-407e-97fd-39b74b3bc4bc from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
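Instance fa33e1a5-677a-489c-8c89-a33066b18103 is being torn down in the order the log records: power off the VM, unregister it, delete its datastore directory, then deallocate its networking. The sketch below only replays that ordering with hypothetical placeholder helpers; it is not the Nova driver code, and the best-effort "keep going on failure" choice is an assumption, shown because cleanup paths commonly try to complete the remaining steps.

```python
# Hypothetical placeholders for the calls recorded above: PowerOffVM_Task,
# UnregisterVM, FileManager.DeleteDatastoreFile_Task, network deallocation.
def power_off(vm): print(f"Powered off the VM {vm}")
def unregister(vm): print(f"Unregistered the VM {vm}")
def delete_datastore_dir(path): print(f"Deleted the datastore file {path}")
def deallocate_network(vm): print(f"Deallocated network for instance {vm}")

def destroy_instance(vm_uuid, datastore, on_error=print):
    """Tear an instance down in the same order the log shows."""
    steps = (
        lambda: power_off(vm_uuid),
        lambda: unregister(vm_uuid),
        lambda: delete_datastore_dir(f"[{datastore}] {vm_uuid}"),
        lambda: deallocate_network(vm_uuid),
    )
    for step in steps:
        try:
            step()
        except Exception as exc:  # best-effort cleanup: log and continue
            on_error(f"cleanup step failed: {exc}")

if __name__ == "__main__":
    destroy_instance("fa33e1a5-677a-489c-8c89-a33066b18103", "datastore1")
```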
[ 1025.954450] env[62914]: DEBUG nova.network.neutron [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [{"id": "de62c681-4ead-4636-8a49-3bcab66952b9", "address": "fa:16:3e:3d:2c:1a", "network": {"id": "9be47f79-b984-4fc2-a590-a80f36132ab1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1828291695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2562164f04b045a59b3b501d2b0014ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62c681-4e", "ovs_interfaceid": "de62c681-4ead-4636-8a49-3bcab66952b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.961715] env[62914]: DEBUG oslo_vmware.api [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832434, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.998107] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-417d4287-0f76-4d2e-b1da-43455d7ff3e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.998658] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Instance network_info: |[{"id": "87adb6e9-f00f-4164-8903-0a82d32416ca", "address": "fa:16:3e:5e:5c:f5", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87adb6e9-f0", "ovs_interfaceid": "87adb6e9-f00f-4164-8903-0a82d32416ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1025.998886] env[62914]: DEBUG oslo_concurrency.lockutils [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] Acquired lock "refresh_cache-417d4287-0f76-4d2e-b1da-43455d7ff3e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.999218] env[62914]: DEBUG nova.network.neutron [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Refreshing network info cache for port 87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1026.000996] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:5c:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87adb6e9-f00f-4164-8903-0a82d32416ca', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.011633] env[62914]: DEBUG oslo.service.loopingcall [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.015131] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1026.015868] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25d2f746-0fec-4430-92a7-25e8b873a86e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.053594] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.053594] env[62914]: value = "task-4832435" [ 1026.053594] env[62914]: _type = "Task" [ 1026.053594] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.066342] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832435, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.292828] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832432, 'name': ReconfigVM_Task, 'duration_secs': 0.445111} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.293337] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/0c47848d-fcff-404d-8e84-e9fd09be9e9e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.294200] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f966779e-eb3c-4e05-9c97-12316f27f4c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.301506] env[62914]: DEBUG nova.network.neutron [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Updated VIF entry in instance network info cache for port 87adb6e9-f00f-4164-8903-0a82d32416ca. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1026.301946] env[62914]: DEBUG nova.network.neutron [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Updating instance_info_cache with network_info: [{"id": "87adb6e9-f00f-4164-8903-0a82d32416ca", "address": "fa:16:3e:5e:5c:f5", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87adb6e9-f0", "ovs_interfaceid": "87adb6e9-f00f-4164-8903-0a82d32416ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.304744] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1026.304744] env[62914]: value = "task-4832436" [ 1026.304744] env[62914]: _type = "Task" [ 1026.304744] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.318044] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832436, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.380285] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.049s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.383854] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.915s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.386365] env[62914]: INFO nova.compute.claims [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.404686] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832429, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.411156] env[62914]: INFO nova.scheduler.client.report [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Deleted allocations for instance dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1 [ 1026.414811] env[62914]: DEBUG nova.network.neutron [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Port 94d0e4cd-493e-4e41-89dc-b0636889e9d9 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1026.414811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.414811] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.414811] env[62914]: DEBUG nova.network.neutron [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1026.460285] env[62914]: DEBUG oslo_vmware.api [None 
req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832434, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.463385] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Releasing lock "refresh_cache-fa33e1a5-677a-489c-8c89-a33066b18103" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.572123] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832435, 'name': CreateVM_Task, 'duration_secs': 0.475996} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.572326] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1026.573101] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.573253] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.573595] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1026.574026] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80a0d54d-9d51-4d36-a03e-b4c5e29dd98d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.579954] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1026.579954] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268d4bd-def4-b74a-0122-420db0b2b54d" [ 1026.579954] env[62914]: _type = "Task" [ 1026.579954] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.590985] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268d4bd-def4-b74a-0122-420db0b2b54d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.806712] env[62914]: DEBUG oslo_concurrency.lockutils [req-669783ea-5624-4544-9abc-db1e047b5aae req-0ef058fe-c3c9-4a6a-9511-e935024d5b02 service nova] Releasing lock "refresh_cache-417d4287-0f76-4d2e-b1da-43455d7ff3e6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.818070] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832436, 'name': Rename_Task, 'duration_secs': 0.221442} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.818398] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1026.818694] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65bc9647-9bad-4dab-b492-b6ea1366fc11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.827833] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1026.827833] env[62914]: value = "task-4832437" [ 1026.827833] env[62914]: _type = "Task" [ 1026.827833] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.840257] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832437, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.886081] env[62914]: DEBUG oslo_vmware.api [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832429, 'name': PowerOnVM_Task, 'duration_secs': 1.550516} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.886415] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1026.886652] env[62914]: INFO nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Took 10.77 seconds to spawn the instance on the hypervisor. 
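Taken together, the tasks for instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e trace the disk side of a spawn: copy the cached image VMDK out of devstack-image-cache_base, extend the root disk to the flavor size (1048576 KB here), reconfigure the VM to attach the disk, rename, and power on. The following sketch only strings those steps together in that order; SpawnPlan and run_task are illustrative stand-ins, not Nova or oslo.vmware interfaces.

```python
from dataclasses import dataclass

@dataclass
class SpawnPlan:
    image_id: str
    instance_uuid: str
    datastore: str
    root_gb: int

def cached_image_path(plan):
    # Matches the "[datastore] devstack-image-cache_base/<image>/<image>.vmdk"
    # source paths seen in the CopyVirtualDisk_Task entries above.
    return (f"[{plan.datastore}] devstack-image-cache_base/"
            f"{plan.image_id}/{plan.image_id}.vmdk")

def instance_disk_path(plan):
    return (f"[{plan.datastore}] {plan.instance_uuid}/"
            f"{plan.instance_uuid}.vmdk")

def spawn_root_disk(plan, run_task=print):
    """Replay the disk-side spawn steps in the order the log records them."""
    src, dst = cached_image_path(plan), instance_disk_path(plan)
    run_task(f"CopyVirtualDisk_Task: {src} -> {dst}")
    run_task(f"ExtendVirtualDisk_Task: {dst} to {plan.root_gb * 1024 * 1024} KB")
    run_task(f"ReconfigVM_Task: attach {dst} to {plan.instance_uuid}")
    run_task(f"Rename_Task: {plan.instance_uuid}")
    run_task(f"PowerOnVM_Task: {plan.instance_uuid}")

if __name__ == "__main__":
    spawn_root_disk(SpawnPlan(
        image_id="75c43660-b52b-450e-ba36-0f721e14bc6c",
        instance_uuid="0c47848d-fcff-404d-8e84-e9fd09be9e9e",
        datastore="datastore2",
        root_gb=1,
    ))
```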
[ 1026.886995] env[62914]: DEBUG nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1026.887724] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3397d4-1c37-4727-a5e2-53bdde9b0639 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.931567] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7e2c53c3-4558-4338-8ef5-65ee875fdb6a tempest-ServersListShow296Test-2014224217 tempest-ServersListShow296Test-2014224217-project-member] Lock "dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.808s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.960171] env[62914]: DEBUG oslo_vmware.api [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.65912} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.960480] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.960672] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1026.960837] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.961025] env[62914]: INFO nova.compute.manager [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1026.961287] env[62914]: DEBUG oslo.service.loopingcall [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.961541] env[62914]: DEBUG nova.compute.manager [-] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1026.961678] env[62914]: DEBUG nova.network.neutron [-] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1026.968197] env[62914]: DEBUG oslo_concurrency.lockutils [None req-dc58c37a-e119-4539-9e12-b0e47031adcc tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "interface-fa33e1a5-677a-489c-8c89-a33066b18103-b3e109fe-6c2f-407e-97fd-39b74b3bc4bc" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.430s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.098141] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268d4bd-def4-b74a-0122-420db0b2b54d, 'name': SearchDatastore_Task, 'duration_secs': 0.016087} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.098521] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.098864] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.100510] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.100510] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.100510] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.100510] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-185ce088-33bf-47eb-95eb-5cf10c2c4322 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.111058] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.111361] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1027.113141] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa4cdaa9-5f1c-4691-a04e-6c23a625cc86 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.119826] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1027.119826] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a2018-7acb-42ba-b7ce-e69250c18523" [ 1027.119826] env[62914]: _type = "Task" [ 1027.119826] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.129778] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a2018-7acb-42ba-b7ce-e69250c18523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.341505] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832437, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.365200] env[62914]: DEBUG nova.network.neutron [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.412185] env[62914]: INFO nova.compute.manager [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Took 28.58 seconds to build instance. 
[ 1027.522658] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.522658] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.522658] env[62914]: DEBUG nova.compute.manager [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Going to confirm migration 6 {{(pid=62914) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1027.634612] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a2018-7acb-42ba-b7ce-e69250c18523, 'name': SearchDatastore_Task, 'duration_secs': 0.014899} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.635178] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d9a3cb6-a6c1-4395-aa80-616f0af2fa92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.644175] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1027.644175] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5206bff8-f28b-0cd1-0de7-f022155c222f" [ 1027.644175] env[62914]: _type = "Task" [ 1027.644175] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.656332] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5206bff8-f28b-0cd1-0de7-f022155c222f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.801789] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee896cbe-2329-46ba-a41c-eba3f8e2f2f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.811549] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfd1323-e1bd-4f60-ab74-d15d03c73bb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.859050] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0894f052-1573-4685-b3e2-44131d0cc03c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.867855] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.869558] env[62914]: DEBUG oslo_vmware.api [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832437, 'name': PowerOnVM_Task, 'duration_secs': 0.692109} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.872203] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1027.872531] env[62914]: INFO nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Took 9.25 seconds to spawn the instance on the hypervisor. 
[ 1027.872794] env[62914]: DEBUG nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1027.873999] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d4e687-b72d-4977-a670-f67bebf91bef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.878015] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237e13e6-59d9-4fdd-9084-edfebb78db1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.895358] env[62914]: DEBUG nova.compute.provider_tree [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.914242] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7be5b1b7-2e99-4eca-8f74-16582213ea1e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.093s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.101955] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.102274] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.102347] env[62914]: DEBUG nova.network.neutron [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1028.102533] env[62914]: DEBUG nova.objects.instance [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lazy-loading 'info_cache' on Instance uuid 120fa16e-60cd-4326-b6c4-f1df419dbcb7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.156053] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5206bff8-f28b-0cd1-0de7-f022155c222f, 
'name': SearchDatastore_Task, 'duration_secs': 0.024059} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.156369] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.156647] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 417d4287-0f76-4d2e-b1da-43455d7ff3e6/417d4287-0f76-4d2e-b1da-43455d7ff3e6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1028.156937] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5a330a3-c8eb-4513-a23b-d4f2b65fcb9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.165382] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1028.165382] env[62914]: value = "task-4832438" [ 1028.165382] env[62914]: _type = "Task" [ 1028.165382] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.177378] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.375261] env[62914]: DEBUG nova.compute.manager [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62914) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1028.401668] env[62914]: DEBUG nova.scheduler.client.report [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1028.412075] env[62914]: INFO nova.compute.manager [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Took 28.17 seconds to build instance. [ 1028.515817] env[62914]: DEBUG nova.network.neutron [-] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.637021] env[62914]: DEBUG nova.compute.manager [req-ca42f71f-23e5-45a2-8b3f-018b42d5191d req-14131f4e-a564-48a1-98d5-63ccfbe54d52 service nova] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Received event network-vif-deleted-de62c681-4ead-4636-8a49-3bcab66952b9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1028.678140] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832438, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.912846] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.913835] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1028.919067] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.062s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.919406] env[62914]: DEBUG nova.objects.instance [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lazy-loading 'resources' on Instance uuid 55192659-4d65-4e74-a47f-46d650b6b095 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.920806] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5e8e4dc4-0203-47bd-a9cb-27eaa881ba9e tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.691s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.020913] env[62914]: INFO nova.compute.manager [-] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Took 2.06 seconds to deallocate network for instance. [ 1029.177706] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.816947} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.178059] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 417d4287-0f76-4d2e-b1da-43455d7ff3e6/417d4287-0f76-4d2e-b1da-43455d7ff3e6.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1029.178293] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.178634] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-141a78f0-2ba8-4849-8508-1190560876f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.186683] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1029.186683] env[62914]: value = "task-4832439" [ 1029.186683] env[62914]: _type = "Task" [ 1029.186683] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.201998] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.359117] env[62914]: DEBUG nova.network.neutron [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [{"id": "2295762d-8e27-469d-a292-9ef453b210d6", "address": "fa:16:3e:65:01:15", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295762d-8e", "ovs_interfaceid": "2295762d-8e27-469d-a292-9ef453b210d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.426171] env[62914]: DEBUG nova.compute.utils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1029.428208] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1029.428388] env[62914]: DEBUG nova.network.neutron [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1029.471348] env[62914]: DEBUG nova.policy [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6739a790d54c98b39ff51cf254379c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd141c01c1d5848eea6ef2b831e431ba5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1029.494521] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.528698] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.709768] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089072} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.710213] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.711550] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb77842e-c52c-48f9-83b4-c23d74c4d289 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.744318] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 417d4287-0f76-4d2e-b1da-43455d7ff3e6/417d4287-0f76-4d2e-b1da-43455d7ff3e6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.747596] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-973d1e37-fb2d-4e38-83a2-50e140765ac8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.770325] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1029.770325] env[62914]: value = "task-4832440" [ 1029.770325] env[62914]: _type = "Task" [ 1029.770325] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.781238] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832440, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.797115] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d9a9d9-5eb1-4d49-843f-fc715ea192ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.805956] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e533cbb-fec5-4634-b6d7-097bdc731a16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.849019] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7049044-75c2-43f9-a633-d5700286383c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.858081] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c055c7f5-b3e3-40f5-89b4-940cb117e434 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.862975] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-120fa16e-60cd-4326-b6c4-f1df419dbcb7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.863305] env[62914]: DEBUG nova.objects.instance [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lazy-loading 'migration_context' on Instance uuid 120fa16e-60cd-4326-b6c4-f1df419dbcb7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.865286] env[62914]: DEBUG nova.network.neutron [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Successfully created port: 689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.880504] env[62914]: INFO nova.compute.manager [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Rescuing [ 1029.880504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.880504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.880504] env[62914]: DEBUG nova.network.neutron [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 
tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1029.885267] env[62914]: DEBUG nova.compute.provider_tree [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.932202] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1030.282920] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832440, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.369097] env[62914]: DEBUG nova.objects.base [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Object Instance<120fa16e-60cd-4326-b6c4-f1df419dbcb7> lazy-loaded attributes: info_cache,migration_context {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1030.370742] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65470b0e-6ea2-43d8-a84d-69b4270869ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.399255] env[62914]: DEBUG nova.scheduler.client.report [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1030.404606] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ce6ea73-f296-4853-b521-fe8210bac27d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.412659] env[62914]: DEBUG oslo_vmware.api [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1030.412659] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c410f9-adf5-2aae-9a6b-72188ba0b69b" [ 1030.412659] env[62914]: _type = "Task" [ 1030.412659] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.426487] env[62914]: DEBUG oslo_vmware.api [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c410f9-adf5-2aae-9a6b-72188ba0b69b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.644264] env[62914]: DEBUG nova.network.neutron [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Updating instance_info_cache with network_info: [{"id": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "address": "fa:16:3e:c1:a5:65", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd8a334c-cc", "ovs_interfaceid": "bd8a334c-ccd2-4d47-8194-494527e06ae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.786788] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832440, 'name': ReconfigVM_Task, 'duration_secs': 0.518544} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.787276] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 417d4287-0f76-4d2e-b1da-43455d7ff3e6/417d4287-0f76-4d2e-b1da-43455d7ff3e6.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.788666] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b9c87c9-9bee-4330-8333-faf6487d9c58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.797482] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1030.797482] env[62914]: value = "task-4832441" [ 1030.797482] env[62914]: _type = "Task" [ 1030.797482] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.809318] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832441, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.910903] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.991s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.913916] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.202s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.914264] env[62914]: DEBUG nova.objects.instance [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'resources' on Instance uuid dc99b470-4334-408d-8853-d2e9b9204d04 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.927188] env[62914]: DEBUG oslo_vmware.api [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c410f9-adf5-2aae-9a6b-72188ba0b69b, 'name': SearchDatastore_Task, 'duration_secs': 0.013232} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.928321] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.944923] env[62914]: INFO nova.scheduler.client.report [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted allocations for instance 55192659-4d65-4e74-a47f-46d650b6b095 [ 1030.949672] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1031.147332] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-0c47848d-fcff-404d-8e84-e9fd09be9e9e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.309465] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832441, 'name': Rename_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.459137] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a49a008b-1852-46d5-a2a0-3e7336c3cd60 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "55192659-4d65-4e74-a47f-46d650b6b095" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.916s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.637347] env[62914]: DEBUG nova.network.neutron [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Successfully updated port: 689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.740864] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef75ddb-7ac5-48d6-a9f9-ae96beec9d9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.750835] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556a412b-46bd-4a41-8f18-b0de3ad11c7e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.786042] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837bb4f9-f679-4a32-ad2d-9d10dd443529 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.794507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb06e1a-d2ec-43d7-9411-e1fb925e04c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.810393] env[62914]: DEBUG nova.compute.provider_tree [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.818898] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832441, 'name': Rename_Task, 'duration_secs': 0.662371} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.819247] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1031.819545] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-404bc604-1afd-4952-8988-f0e4198ac785 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.828331] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1031.828331] env[62914]: value = "task-4832442" [ 1031.828331] env[62914]: _type = "Task" [ 1031.828331] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.837776] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832442, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.141458] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.141710] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.141910] env[62914]: DEBUG nova.network.neutron [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.314117] env[62914]: DEBUG nova.scheduler.client.report [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.343660] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 
tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832442, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.683768] env[62914]: DEBUG nova.network.neutron [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1032.820957] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.823469] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.794s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.823734] env[62914]: DEBUG nova.objects.instance [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lazy-loading 'resources' on Instance uuid 79c7728a-0452-44ec-91de-62e3f09f9183 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.839051] env[62914]: DEBUG oslo_vmware.api [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832442, 'name': PowerOnVM_Task, 'duration_secs': 0.753333} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.840514] env[62914]: INFO nova.scheduler.client.report [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted allocations for instance dc99b470-4334-408d-8853-d2e9b9204d04 [ 1032.841700] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1032.841700] env[62914]: INFO nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 1032.841857] env[62914]: DEBUG nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1032.845394] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87c5ed0-a90c-4529-a4e1-35eca2ef986b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.868755] env[62914]: DEBUG nova.network.neutron [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updating instance_info_cache with network_info: [{"id": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "address": "fa:16:3e:e8:5c:f2", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap689aba7f-31", "ovs_interfaceid": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.356245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-c701f95c-9bd0-453d-9f37-d67f108ad90f tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "dc99b470-4334-408d-8853-d2e9b9204d04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.116s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.364963] env[62914]: INFO nova.compute.manager [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Took 30.39 seconds to build instance. 
[ 1033.371426] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.371904] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Instance network_info: |[{"id": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "address": "fa:16:3e:e8:5c:f2", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap689aba7f-31", "ovs_interfaceid": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1033.602859] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dcb23c-c2c9-4f97-a79d-9d46c2bec35a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.611455] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f12bc5-ffde-4ec2-8daf-67a78127c3b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.644507] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6394a9c-ad16-4f7c-986f-110ef406c58d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.652648] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d05625-81ce-43c6-be95-736628f919c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.667124] env[62914]: DEBUG nova.compute.provider_tree [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.867413] env[62914]: DEBUG oslo_concurrency.lockutils [None req-087ad19b-48e3-4dae-b120-5d2eb4137e11 tempest-ServersTestJSON-1645965215 
tempest-ServersTestJSON-1645965215-project-member] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.899s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.170813] env[62914]: DEBUG nova.scheduler.client.report [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1034.185116] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1034.185116] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1034.185288] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.185386] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1034.185561] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.185819] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1034.186101] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1034.186322] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1034.186539] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1034.186725] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1034.186911] env[62914]: DEBUG nova.virt.hardware [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1034.189518] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c64624f-bd5a-478b-9ff9-d0572bcc910c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.206841] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa4d83f-ae08-4ee3-987b-dc774f39c853 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.214511] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212eec4-ad69-82f7-d8ef-418dac482919/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1034.216115] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbc3068-5241-4ca4-8cff-b1751042257d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.223725] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212eec4-ad69-82f7-d8ef-418dac482919/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1034.223987] env[62914]: ERROR oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212eec4-ad69-82f7-d8ef-418dac482919/disk-0.vmdk due to incomplete transfer. [ 1034.232334] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aa3f8055-447a-4cdb-9e46-218835bc68b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.234718] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:5c:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '689aba7f-31af-4116-8b4e-bcec10c9c5ba', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.242839] env[62914]: DEBUG oslo.service.loopingcall [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.243481] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1034.244214] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8226b284-3309-48e8-b83c-9cf22d46c86f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.261377] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212eec4-ad69-82f7-d8ef-418dac482919/disk-0.vmdk. 
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1034.261754] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Uploaded image 04d1e2dc-2bd1-433e-b7d2-80c799be344b to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1034.263492] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1034.264248] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cb377f17-105a-4feb-a0b2-1e301421d468 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.269671] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.269671] env[62914]: value = "task-4832443" [ 1034.269671] env[62914]: _type = "Task" [ 1034.269671] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.274652] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1034.274652] env[62914]: value = "task-4832444" [ 1034.274652] env[62914]: _type = "Task" [ 1034.274652] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.281589] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832443, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.286939] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832444, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.675705] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.679083] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 5.184s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.698377] env[62914]: INFO nova.scheduler.client.report [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Deleted allocations for instance 79c7728a-0452-44ec-91de-62e3f09f9183 [ 1034.709450] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1034.709640] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-496aa680-48cd-4f70-8233-b26a43253ef4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.719179] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1034.719179] env[62914]: value = "task-4832445" [ 1034.719179] env[62914]: _type = "Task" [ 1034.719179] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.732660] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.788042] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832443, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.791648] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832444, 'name': Destroy_Task} progress is 33%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.173774] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "4911baea-15df-46db-be11-fcf998eb0cb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.174168] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "4911baea-15df-46db-be11-fcf998eb0cb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.174404] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "4911baea-15df-46db-be11-fcf998eb0cb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.174611] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "4911baea-15df-46db-be11-fcf998eb0cb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.174799] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "4911baea-15df-46db-be11-fcf998eb0cb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.178057] env[62914]: INFO nova.compute.manager [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Terminating instance [ 1035.181494] env[62914]: DEBUG nova.compute.manager [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1035.181867] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1035.182790] env[62914]: DEBUG nova.objects.instance [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'migration_context' on Instance uuid ec73b924-e132-44b6-bc67-2b3c08592f03 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.184820] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0645acb1-2c3c-4fd3-9c85-eb73488aeb02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.195902] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1035.198157] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbd84b5e-571f-499f-90bf-fb880cc4c6b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.209058] env[62914]: DEBUG oslo_vmware.api [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 1035.209058] env[62914]: value = "task-4832446" [ 1035.209058] env[62914]: _type = "Task" [ 1035.209058] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.210021] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6898848a-17f6-45a7-a811-53c4fdb4068d tempest-ImagesTestJSON-652168268 tempest-ImagesTestJSON-652168268-project-member] Lock "79c7728a-0452-44ec-91de-62e3f09f9183" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.186s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.222908] env[62914]: DEBUG nova.compute.manager [req-d1fab05a-26f4-4558-84e4-e4d72b413a8c req-5029e01c-7cfd-4d88-8fd0-16932e559ccc service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Received event network-vif-plugged-689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1035.222908] env[62914]: DEBUG oslo_concurrency.lockutils [req-d1fab05a-26f4-4558-84e4-e4d72b413a8c req-5029e01c-7cfd-4d88-8fd0-16932e559ccc service nova] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.222908] env[62914]: DEBUG oslo_concurrency.lockutils [req-d1fab05a-26f4-4558-84e4-e4d72b413a8c req-5029e01c-7cfd-4d88-8fd0-16932e559ccc service nova] Lock "3b26b5d7-524a-41af-ab75-a158568e031e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.222908] env[62914]: DEBUG oslo_concurrency.lockutils [req-d1fab05a-26f4-4558-84e4-e4d72b413a8c req-5029e01c-7cfd-4d88-8fd0-16932e559ccc service nova] Lock "3b26b5d7-524a-41af-ab75-a158568e031e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.222908] env[62914]: DEBUG nova.compute.manager [req-d1fab05a-26f4-4558-84e4-e4d72b413a8c req-5029e01c-7cfd-4d88-8fd0-16932e559ccc service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] No waiting events found dispatching network-vif-plugged-689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1035.222908] env[62914]: WARNING nova.compute.manager [req-d1fab05a-26f4-4558-84e4-e4d72b413a8c req-5029e01c-7cfd-4d88-8fd0-16932e559ccc service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Received unexpected event network-vif-plugged-689aba7f-31af-4116-8b4e-bcec10c9c5ba for instance with vm_state building and task_state spawning. 
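The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines throughout this trace come from oslo.concurrency's lockutils wrappers. A minimal sketch of both forms; the lock names are illustrative, matching the ones seen here ("compute_resources", "<instance-uuid>-events"):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section; with debug logging enabled, lockutils emits the
    # acquired/waited and released/held lines seen in this log.
    pass

update_usage()

# Context-manager form, as used for the per-instance event locks:
with lockutils.lock('3b26b5d7-524a-41af-ab75-a158568e031e-events'):
    pass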
[ 1035.235012] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.235012] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.237269] env[62914]: DEBUG oslo_vmware.api [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832446, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.245309] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832445, 'name': PowerOffVM_Task, 'duration_secs': 0.264944} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.247703] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52489de9-1884-0263-183a-6645458721ea/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1035.248180] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1035.249102] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfccdcf-3928-418f-bd3b-f23257280387 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.255181] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d232291-2399-4794-b361-ed5825383f9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.262791] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "b285198b-aa95-4dcb-99b3-531d09c210d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.263057] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.267472] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52489de9-1884-0263-183a-6645458721ea/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1035.267674] env[62914]: ERROR oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52489de9-1884-0263-183a-6645458721ea/disk-0.vmdk due to incomplete transfer. 
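The two ERROR lines about aborting leases mark VMDK export streams that were closed before the full disk was read. A hedged sketch of the decision they reflect, assuming `session` is an existing oslo.vmware VMwareAPISession, `lease` the HttpNfcLease moref from the export, and `transfer_complete` a flag tracked by the caller; this mirrors the logged behaviour, not the library's exact code:

from oslo_vmware import vim_util

session = ...            # assumed: existing VMwareAPISession
lease = ...              # assumed: HttpNfcLease moref returned by the export
transfer_complete = False  # assumed: set True only if every byte was read

# Read the lease's current state ('ready', 'done', 'error', ...).
state = session.invoke_api(vim_util, 'get_object_property',
                           session.vim, lease, 'state')

if state == 'ready' and not transfer_complete:
    # Matches the logged path: lease still 'ready' but the read handle was
    # closed early, so abort rather than complete the NFC lease.
    session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)
elif state == 'ready':
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)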
[ 1035.282214] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e1096b9a-c926-4212-ac98-89e0b7412acc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.292668] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21c65f2-d36b-4680-9d6c-856a5a36ae28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.308664] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832444, 'name': Destroy_Task, 'duration_secs': 0.663177} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.315562] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Destroyed the VM [ 1035.316054] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1035.316492] env[62914]: DEBUG oslo_vmware.rw_handles [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52489de9-1884-0263-183a-6645458721ea/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1035.316784] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Uploaded image 4d62b4b6-d832-4dbd-be0d-027df1fdaff5 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1035.319929] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1035.320712] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832443, 'name': CreateVM_Task, 'duration_secs': 0.647895} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.320961] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2d0e5ff3-b193-411f-9c81-6bb82dd79f88 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.323335] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-56e21857-d6c3-410a-9b7b-0618a396125e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.325386] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1035.326138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.326288] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.326628] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.327609] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ac58a09-df2f-40bd-9895-43b5ec28ee67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.333403] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1035.333403] env[62914]: value = "task-4832447" [ 1035.333403] env[62914]: _type = "Task" [ 1035.333403] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.337690] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1035.337690] env[62914]: value = "task-4832448" [ 1035.337690] env[62914]: _type = "Task" [ 1035.337690] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.337978] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1035.337978] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520ca7d9-a726-b321-e902-5a4e7eed1406" [ 1035.337978] env[62914]: _type = "Task" [ 1035.337978] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.349738] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1035.350479] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f060ef52-a8eb-4fe7-ac7f-bbf8fd01f448 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.356724] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832447, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.363489] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520ca7d9-a726-b321-e902-5a4e7eed1406, 'name': SearchDatastore_Task, 'duration_secs': 0.022667} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.363744] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832448, 'name': Destroy_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.365179] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.365435] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.365991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.365991] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.365991] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.366329] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1035.366329] env[62914]: value = "task-4832449" [ 1035.366329] env[62914]: _type = "Task" [ 1035.366329] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.366550] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d8a5cd4-7abd-40c8-9a23-ca28e9553a67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.378553] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1035.378811] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.379125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.379287] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.379470] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.380335] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a668a8a8-2c9a-4cab-8269-68cb52ac2c95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.382299] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.382484] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1035.383249] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2a3bfc1-4991-486c-b370-69e12f2ef6c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.392388] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1035.392388] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525341a7-144f-ad9f-b559-eb50e3948379" [ 1035.392388] env[62914]: _type = "Task" [ 1035.392388] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.393677] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.393861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1035.397464] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-605b0acd-19ef-4513-9e72-14ca46a09dce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.405263] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525341a7-144f-ad9f-b559-eb50e3948379, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.406582] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1035.406582] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521878e1-5dc2-c450-d61e-170a8ed32c33" [ 1035.406582] env[62914]: _type = "Task" [ 1035.406582] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.415168] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521878e1-5dc2-c450-d61e-170a8ed32c33, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.731310] env[62914]: DEBUG oslo_vmware.api [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832446, 'name': PowerOffVM_Task, 'duration_secs': 0.389193} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.731310] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1035.731838] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1035.732263] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf6d8b24-99ef-4c32-88b9-4cde9c510651 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.741727] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1035.766179] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1035.808854] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1035.809141] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1035.809332] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleting the datastore file [datastore1] 4911baea-15df-46db-be11-fcf998eb0cb6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.809675] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06adc6b9-9688-4d90-b319-c383722ae268 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.818994] env[62914]: DEBUG oslo_vmware.api [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 1035.818994] env[62914]: value = "task-4832451" [ 1035.818994] env[62914]: _type = "Task" [ 1035.818994] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.830530] env[62914]: DEBUG oslo_vmware.api [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.857752] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832448, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.858053] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832447, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.905465] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525341a7-144f-ad9f-b559-eb50e3948379, 'name': SearchDatastore_Task, 'duration_secs': 0.017726} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.906449] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1567a198-1268-47ef-9d52-d2176b00970d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.929322] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1035.929322] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529581d3-cf86-c562-9f0e-e3160a13a8a5" [ 1035.929322] env[62914]: _type = "Task" [ 1035.929322] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.929637] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521878e1-5dc2-c450-d61e-170a8ed32c33, 'name': SearchDatastore_Task, 'duration_secs': 0.011066} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.937437] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff5dde4-aa56-4c00-b92f-fafd41470d7d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.946387] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529581d3-cf86-c562-9f0e-e3160a13a8a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.948080] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1035.948080] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c6735-0999-aaf7-907b-0626a12bdf29" [ 1035.948080] env[62914]: _type = "Task" [ 1035.948080] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.960386] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c6735-0999-aaf7-907b-0626a12bdf29, 'name': SearchDatastore_Task, 'duration_secs': 0.011368} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.960680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.962644] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. {{(pid=62914) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1035.962644] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b049415b-0620-45e8-a265-4152167a4973 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.975072] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1035.975072] env[62914]: value = "task-4832452" [ 1035.975072] env[62914]: _type = "Task" [ 1035.975072] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.987183] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832452, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.077866] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf14a85-1321-48d5-b728-b0af08f4b1de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.087030] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c029aa6-a11c-4c14-bff0-620f768110ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.121809] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503d636a-d013-406d-b16a-7d76de167d68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.130393] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0be37d-0f9a-4a43-b604-d2570ea9604f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.146599] env[62914]: DEBUG nova.compute.provider_tree [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.266184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.290733] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.329925] env[62914]: DEBUG oslo_vmware.api [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442069} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.330799] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.331058] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1036.331260] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1036.331447] env[62914]: INFO nova.compute.manager [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1036.331770] env[62914]: DEBUG oslo.service.loopingcall [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.332036] env[62914]: DEBUG nova.compute.manager [-] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1036.332126] env[62914]: DEBUG nova.network.neutron [-] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1036.350834] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832447, 'name': RemoveSnapshot_Task, 'duration_secs': 0.532428} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.354367] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1036.354874] env[62914]: DEBUG nova.compute.manager [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1036.355247] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832448, 'name': Destroy_Task, 'duration_secs': 0.580999} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.355982] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5cf2e5-40ef-4e4d-b48e-df22efbeb757 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.358833] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Destroyed the VM [ 1036.359094] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1036.359355] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f887b2f8-3f55-4706-b8df-121ec83f631a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.369979] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1036.369979] env[62914]: value = "task-4832453" [ 1036.369979] env[62914]: _type = "Task" [ 1036.369979] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.378363] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832453, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.443959] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529581d3-cf86-c562-9f0e-e3160a13a8a5, 'name': SearchDatastore_Task, 'duration_secs': 0.025865} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.444327] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.444731] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1036.445224] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2556dbdd-550d-40a9-9c81-8ed41362a589 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.455449] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1036.455449] env[62914]: value = "task-4832454" [ 1036.455449] env[62914]: _type = "Task" [ 1036.455449] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.467510] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.491272] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832452, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.595104] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "da2af7d4-f311-444a-aa9f-0744e698defb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.595375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "da2af7d4-f311-444a-aa9f-0744e698defb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.649604] env[62914]: DEBUG nova.scheduler.client.report [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.880413] env[62914]: INFO nova.compute.manager [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Shelve offloading [ 1036.887021] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832453, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.887861] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1036.888244] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63dfa412-50cb-4396-8138-9c802ee7d475 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.896328] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1036.896328] env[62914]: value = "task-4832455" [ 1036.896328] env[62914]: _type = "Task" [ 1036.896328] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.909809] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1036.910174] env[62914]: DEBUG nova.compute.manager [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1036.911411] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556587b8-e8ab-4d8b-b64f-7bed2f778b5f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.920496] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.920775] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.924527] env[62914]: DEBUG nova.network.neutron [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1036.972957] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832454, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.989814] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.840939} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.991151] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. 
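The repeated "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" entries above are produced by oslo.vmware's task polling (wait_for_task at oslo_vmware/api.py:397, _poll_task at api.py:434/444, per the source tags in the log). The following is a minimal sketch of that polling pattern, not the library's actual implementation; `fetch_task_info` is a hypothetical callable standing in for the real vSphere TaskInfo lookup, and the 0.5 s interval is illustrative.

```python
# Sketch only: mirrors the wait_for_task/_poll_task loop seen in the log.
# `fetch_task_info` is a hypothetical stand-in, not part of the real API.
from oslo_service import loopingcall


def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5):
    """Poll a vSphere task until it finishes and return its TaskInfo."""

    def _poll():
        info = fetch_task_info(task_ref)
        if info.state in ('queued', 'running'):
            # Still pending: this is where the "progress is N%" lines come
            # from; the loop simply fires again after poll_interval seconds.
            return
        if info.state == 'success':
            # Stop the loop and hand `info` back through .wait() below; this
            # is the branch the log marks as "completed successfully".
            raise loopingcall.LoopingCallDone(info)
        raise RuntimeError('Task %s ended in state %s' % (task_ref, info.state))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()
```

Each "Task: {'id': task-..., ...} completed successfully" entry above corresponds to one such wait returning.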
[ 1036.992192] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf09191-c83e-40a9-af78-ddb75320cfed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.025919] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.027115] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7300666c-71b6-4919-aa99-7447219edea0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.050360] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1037.050360] env[62914]: value = "task-4832456" [ 1037.050360] env[62914]: _type = "Task" [ 1037.050360] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.065349] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832456, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.101070] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1037.251571] env[62914]: DEBUG nova.compute.manager [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Received event network-changed-689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1037.251941] env[62914]: DEBUG nova.compute.manager [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Refreshing instance network info cache due to event network-changed-689aba7f-31af-4116-8b4e-bcec10c9c5ba. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1037.252220] env[62914]: DEBUG oslo_concurrency.lockutils [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] Acquiring lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.252451] env[62914]: DEBUG oslo_concurrency.lockutils [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] Acquired lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.252684] env[62914]: DEBUG nova.network.neutron [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Refreshing network info cache for port 689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1037.382895] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832453, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.470387] env[62914]: DEBUG nova.network.neutron [-] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.475022] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832454, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.862067} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.475022] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1037.475022] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.475022] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6600c0a4-225e-4184-8964-3796d8ee53e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.486297] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1037.486297] env[62914]: value = "task-4832457" [ 1037.486297] env[62914]: _type = "Task" [ 1037.486297] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.498341] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832457, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.563474] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832456, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.633541] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.661497] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.983s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.674095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.145s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.674384] env[62914]: DEBUG nova.objects.instance [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'resources' on Instance uuid fa33e1a5-677a-489c-8c89-a33066b18103 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.841572] env[62914]: DEBUG nova.network.neutron [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00706251-f6", "ovs_interfaceid": "00706251-f634-4dcb-9705-105152de241f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.882693] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832453, 'name': RemoveSnapshot_Task, 
'duration_secs': 1.233143} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.883058] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1037.883384] env[62914]: DEBUG nova.compute.manager [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1037.884182] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9992898b-8c93-4f94-b2eb-4b09bfd140d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.973297] env[62914]: INFO nova.compute.manager [-] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Took 1.64 seconds to deallocate network for instance. [ 1037.999321] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832457, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108637} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.999694] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.001282] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25f3eb4-5eec-4e62-babc-24d443733667 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.028581] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.029047] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cd99c8b-5cb8-4da2-bc6e-c3f212602cf2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.050728] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1038.050728] env[62914]: value = "task-4832458" [ 1038.050728] env[62914]: _type = "Task" [ 1038.050728] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.066041] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832456, 'name': ReconfigVM_Task, 'duration_secs': 0.755252} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.070925] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.070925] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832458, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.071421] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe937c9-97f4-46e8-9041-2f2d5395bbcb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.106563] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34956067-2cf0-4bce-86f2-549132e82f08 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.124964] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1038.124964] env[62914]: value = "task-4832459" [ 1038.124964] env[62914]: _type = "Task" [ 1038.124964] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.134679] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832459, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.346104] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.365013] env[62914]: DEBUG nova.network.neutron [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updated VIF entry in instance network info cache for port 689aba7f-31af-4116-8b4e-bcec10c9c5ba. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1038.365426] env[62914]: DEBUG nova.network.neutron [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updating instance_info_cache with network_info: [{"id": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "address": "fa:16:3e:e8:5c:f2", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap689aba7f-31", "ovs_interfaceid": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.401776] env[62914]: INFO nova.compute.manager [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Shelve offloading [ 1038.404101] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1038.404430] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a26d9e8d-582e-4de9-af84-742fbbfd0145 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.416684] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1038.416684] env[62914]: value = "task-4832460" [ 1038.416684] env[62914]: _type = "Task" [ 1038.416684] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.435642] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1038.435868] env[62914]: DEBUG nova.compute.manager [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1038.437362] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12514d91-1002-4b6d-9824-0b0395509b75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.444136] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.444587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.444587] env[62914]: DEBUG nova.network.neutron [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1038.483069] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.546529] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff24b70-f2c4-4e31-9bbf-3307280636f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.558321] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34096607-d776-449c-bab5-a1b93f58bf74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.565684] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832458, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.605205] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d680d5-e04e-49c3-9398-d28727221faa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.615365] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50809098-b087-4cbb-85be-e4eb60500f4d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.630858] env[62914]: DEBUG nova.compute.provider_tree [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.640636] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832459, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.693929] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1038.694869] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05b1830-fd08-44e0-9939-5c12428700ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.703460] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1038.703551] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcba2f98-3189-47b5-92ea-f66e6145120d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.781865] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1038.782193] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1038.782434] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 
tempest-ServersNegativeTestJSON-170677023-project-member] Deleting the datastore file [datastore2] 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.782819] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31cc4fcb-b57c-48dd-9156-ac552f12e454 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.794292] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1038.794292] env[62914]: value = "task-4832462" [ 1038.794292] env[62914]: _type = "Task" [ 1038.794292] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.804325] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832462, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.868232] env[62914]: DEBUG oslo_concurrency.lockutils [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] Releasing lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.868456] env[62914]: DEBUG nova.compute.manager [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Received event network-vif-deleted-b8eb6717-ecdc-4bbe-ad47-b975cf486bfa {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1038.868674] env[62914]: INFO nova.compute.manager [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Neutron deleted interface b8eb6717-ecdc-4bbe-ad47-b975cf486bfa; detaching it from the instance and deleting it from the info cache [ 1038.868866] env[62914]: DEBUG nova.network.neutron [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.974931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.975386] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s 
{{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.062162] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832458, 'name': ReconfigVM_Task, 'duration_secs': 0.982561} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.062479] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.063089] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33931dda-973a-47eb-8e25-4784c7f62b36 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.069947] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1039.069947] env[62914]: value = "task-4832463" [ 1039.069947] env[62914]: _type = "Task" [ 1039.069947] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.081249] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832463, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.137256] env[62914]: DEBUG nova.scheduler.client.report [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.146825] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832459, 'name': ReconfigVM_Task, 'duration_secs': 0.521981} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.146825] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1039.147131] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74f41d0d-3d46-41b7-b635-df06903edd33 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.154636] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1039.154636] env[62914]: value = "task-4832464" [ 1039.154636] env[62914]: _type = "Task" [ 1039.154636] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.166450] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832464, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.209354] env[62914]: DEBUG nova.network.neutron [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updating instance_info_cache with network_info: [{"id": "c64b5774-e946-4217-a170-f93b64d5070b", "address": "fa:16:3e:c6:f5:e1", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64b5774-e9", "ovs_interfaceid": "c64b5774-e946-4217-a170-f93b64d5070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.211441] env[62914]: INFO nova.compute.manager [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Swapping old allocation on dict_keys(['f2f7a014-852b-4b37-9610-c5761f4b0175']) held by migration b9c9eba1-369b-4782-a7b7-d155b21c313d for instance [ 
1039.235874] env[62914]: DEBUG nova.scheduler.client.report [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Overwriting current allocation {'allocations': {'f2f7a014-852b-4b37-9610-c5761f4b0175': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 129}}, 'project_id': '894c73ea90624428afeb1165afbbfa9c', 'user_id': '8a8cfcd0aed9499a83c09052328647cb', 'consumer_generation': 1} on consumer ec73b924-e132-44b6-bc67-2b3c08592f03 {{(pid=62914) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1039.287570] env[62914]: DEBUG nova.compute.manager [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-vif-unplugged-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1039.287570] env[62914]: DEBUG oslo_concurrency.lockutils [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.287570] env[62914]: DEBUG oslo_concurrency.lockutils [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.287570] env[62914]: DEBUG oslo_concurrency.lockutils [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.287570] env[62914]: DEBUG nova.compute.manager [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] No waiting events found dispatching network-vif-unplugged-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1039.287570] env[62914]: WARNING nova.compute.manager [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received unexpected event network-vif-unplugged-00706251-f634-4dcb-9705-105152de241f for instance with vm_state shelved and task_state shelving_offloading. 
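The entries above show the oslo.vmware task workflow: PowerOnVM_Task is invoked, a Task handle is returned, and the API layer polls its progress (0%, 66%, ...) until it reaches a terminal state. The following is a minimal, self-contained Python sketch of that poll-until-done loop; FakeTask and its poll() method are hypothetical stand-ins used only so the example runs, not the oslo.vmware implementation.

# Sketch of the poll-until-done pattern seen in the PowerOnVM_Task entries.
# FakeTask is a hypothetical stand-in whose progress advances on each poll.
import time
from dataclasses import dataclass

@dataclass
class FakeTask:
    task_id: str
    progress: int = 0

    def poll(self) -> int:
        """Simulate reading the task's progress from the server."""
        self.progress = min(100, self.progress + 33)
        return self.progress

def wait_for_task(task: FakeTask, poll_interval: float = 0.1) -> None:
    """Poll until the task reports 100%, mirroring the log's progress lines."""
    while True:
        progress = task.poll()
        print(f"Task: {{'id': {task.task_id!r}}} progress is {progress}%.")
        if progress >= 100:
            return
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("task-4832464"))

In the real driver the progress values come from the vCenter task object and the loop also distinguishes success from error states; this sketch keeps only the polling structure.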
[ 1039.287570] env[62914]: DEBUG nova.compute.manager [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-changed-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1039.287570] env[62914]: DEBUG nova.compute.manager [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Refreshing instance network info cache due to event network-changed-00706251-f634-4dcb-9705-105152de241f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1039.287570] env[62914]: DEBUG oslo_concurrency.lockutils [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.287570] env[62914]: DEBUG oslo_concurrency.lockutils [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.287570] env[62914]: DEBUG nova.network.neutron [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Refreshing network info cache for port 00706251-f634-4dcb-9705-105152de241f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1039.306420] env[62914]: DEBUG oslo_vmware.api [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141208} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.306727] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.306919] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1039.307113] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1039.327822] env[62914]: INFO nova.scheduler.client.report [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted allocations for instance 455965de-816d-4ab2-9d5e-a12b06893e6f [ 1039.366752] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.367015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.367254] env[62914]: DEBUG nova.network.neutron [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1039.372265] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5dd2da8-c965-439f-86fe-461b85f71d0d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.382439] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d4ee52-c54f-48bc-a335-e14cb4fb0718 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.418726] env[62914]: DEBUG nova.compute.manager [req-91fac4e5-1c34-4cab-bf8b-456c9b16eceb req-970d2b81-896f-407a-8902-e833c6eef5ce service nova] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Detach interface failed, port_id=b8eb6717-ecdc-4bbe-ad47-b975cf486bfa, reason: Instance 4911baea-15df-46db-be11-fcf998eb0cb6 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1039.478331] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1039.581572] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832463, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.642783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.645375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.717s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.661586] env[62914]: INFO nova.scheduler.client.report [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted allocations for instance fa33e1a5-677a-489c-8c89-a33066b18103 [ 1039.672503] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832464, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.714736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.834890] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.009383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.087365] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832463, 'name': Rename_Task, 'duration_secs': 0.71504} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.090139] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1040.090457] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47f6ce08-79ac-4012-b2ca-064500f42bf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.099809] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1040.099809] env[62914]: value = "task-4832465" [ 1040.099809] env[62914]: _type = "Task" [ 1040.099809] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.112555] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832465, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.170309] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832464, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.174894] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b0a444e1-6761-4fe9-86e9-8619fbc36318 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "fa33e1a5-677a-489c-8c89-a33066b18103" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.927s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.216641] env[62914]: DEBUG nova.network.neutron [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updated VIF entry in instance network info cache for port 00706251-f634-4dcb-9705-105152de241f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1040.216982] env[62914]: DEBUG nova.network.neutron [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": null, "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap00706251-f6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.255979] env[62914]: DEBUG nova.network.neutron [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [{"id": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "address": "fa:16:3e:a9:9e:4b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap94d0e4cd-49", "ovs_interfaceid": "94d0e4cd-493e-4e41-89dc-b0636889e9d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.304941] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.305992] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4186b78-040c-4c3a-9dab-c29e483fa95f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.325371] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1040.326224] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cadc18f-391a-4295-8988-01cfff184450 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.493174] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ce6f10-cc41-4814-a407-adfda561341a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.502590] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b8f254-ab6a-47b1-ae8e-5c9a21d4149a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.539702] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299a4b10-a29e-48cd-9206-7279e21bafb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.548786] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a47c32b-f866-4871-ab58-520448223cd3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.565874] env[62914]: DEBUG nova.compute.provider_tree [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.611660] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832465, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.667648] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832464, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.723741] env[62914]: DEBUG oslo_concurrency.lockutils [req-af33b559-a71c-47c0-83a4-8239bf746cf0 req-0109295a-3bb4-402c-9f20-3646c4c79630 service nova] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.759870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-ec73b924-e132-44b6-bc67-2b3c08592f03" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.761091] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c027d85-178b-41ef-aa6b-f2d01e2274bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.769815] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea34bfc-de88-4a72-b2fa-40d089d968e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.821314] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.821634] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "aedc785f-619f-4b9f-850f-790f84e57577" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.821948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "aedc785f-619f-4b9f-850f-790f84e57577-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.822179] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "aedc785f-619f-4b9f-850f-790f84e57577-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.822451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "aedc785f-619f-4b9f-850f-790f84e57577-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.826007] env[62914]: INFO nova.compute.manager [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Terminating instance [ 1040.831121] env[62914]: DEBUG nova.compute.manager [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1040.831121] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.831121] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b8dc69-e819-4135-8742-40b846db66dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.840382] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1040.840740] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4799e1a5-aa75-45d4-bbd1-318dab4337d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.849557] env[62914]: DEBUG oslo_vmware.api [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 1040.849557] env[62914]: value = "task-4832467" [ 1040.849557] env[62914]: _type = "Task" [ 1040.849557] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.860905] env[62914]: DEBUG oslo_vmware.api [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832467, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.071148] env[62914]: DEBUG nova.scheduler.client.report [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.113217] env[62914]: DEBUG oslo_vmware.api [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832465, 'name': PowerOnVM_Task, 'duration_secs': 1.006176} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.113513] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1041.113730] env[62914]: INFO nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Took 10.16 seconds to spawn the instance on the hypervisor. [ 1041.113922] env[62914]: DEBUG nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1041.114786] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2047c21e-9ea9-4aef-b92b-9b0e11ed7241 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.169789] env[62914]: DEBUG oslo_vmware.api [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832464, 'name': PowerOnVM_Task, 'duration_secs': 1.521315} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.170100] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1041.173442] env[62914]: DEBUG nova.compute.manager [None req-6e9fc38c-6633-4567-8a68-92c7971f93f9 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1041.174405] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e29456-d2e1-4036-941d-86ff49705c02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.363312] env[62914]: DEBUG oslo_vmware.api [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832467, 'name': PowerOffVM_Task, 'duration_secs': 0.395586} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.365316] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1041.365316] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1041.365316] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0eaf3c80-10af-4028-bdaf-60744b86e240 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.389187] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1041.389439] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1041.389646] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleting the datastore file [datastore1] 60169fa7-3266-4105-b17b-f3677ed2c443 {{(pid=62914) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.390348] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6584b775-64f0-432f-befc-1f4ad65ebed7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.397076] env[62914]: DEBUG nova.compute.manager [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Received event network-vif-unplugged-c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1041.397194] env[62914]: DEBUG oslo_concurrency.lockutils [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] Acquiring lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.397421] env[62914]: DEBUG oslo_concurrency.lockutils [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] Lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.397595] env[62914]: DEBUG oslo_concurrency.lockutils [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] Lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.397769] env[62914]: DEBUG nova.compute.manager [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] No waiting events found dispatching network-vif-unplugged-c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1041.397961] env[62914]: WARNING nova.compute.manager [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Received unexpected event network-vif-unplugged-c64b5774-e946-4217-a170-f93b64d5070b for instance with vm_state shelved and task_state shelving_offloading. [ 1041.398115] env[62914]: DEBUG nova.compute.manager [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Received event network-changed-c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1041.398273] env[62914]: DEBUG nova.compute.manager [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Refreshing instance network info cache due to event network-changed-c64b5774-e946-4217-a170-f93b64d5070b. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1041.398638] env[62914]: DEBUG oslo_concurrency.lockutils [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] Acquiring lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.399185] env[62914]: DEBUG oslo_concurrency.lockutils [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] Acquired lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.399185] env[62914]: DEBUG nova.network.neutron [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Refreshing network info cache for port c64b5774-e946-4217-a170-f93b64d5070b {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1041.401969] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1041.401969] env[62914]: value = "task-4832469" [ 1041.401969] env[62914]: _type = "Task" [ 1041.401969] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.413329] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832469, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.458762] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1041.459158] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1041.459474] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleting the datastore file [datastore1] aedc785f-619f-4b9f-850f-790f84e57577 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.459905] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b96d2eb-a170-4415-80d6-2c6a13a7d66e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.468600] env[62914]: DEBUG oslo_vmware.api [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for the task: (returnval){ [ 1041.468600] env[62914]: value = "task-4832470" [ 1041.468600] env[62914]: _type = "Task" [ 1041.468600] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.478487] env[62914]: DEBUG oslo_vmware.api [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832470, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.633568] env[62914]: INFO nova.compute.manager [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Took 22.19 seconds to build instance. 
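A recurring pattern in the entries above is the named lock: a caller logs "Acquiring lock X by Y", how long it waited to acquire, and how long it held the lock on release (per-instance "-events" locks, "refresh_cache-<uuid>" locks, "compute_resources"). Below is a minimal illustration of that pattern using plain threading primitives; it only mimics the log format and is not the oslo.concurrency lockutils implementation. The caller string is an example.

# Illustration of the named-lock acquire/release pattern from the log.
# Plain threading locks keyed by name; caller strings are examples only.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name: str, caller: str):
    with _registry_guard:  # protect the shared lock registry itself
        lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - acquired:.3f}s')

if __name__ == "__main__":
    # Example: serialize work on one instance's network info cache.
    with named_lock("refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443",
                    "example_network_cache_refresh"):
        pass  # refresh the cache while holding the lock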
[ 1041.876510] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1041.876850] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86703a1f-0ec1-43ae-b397-4983984c4545 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.885861] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1041.885861] env[62914]: value = "task-4832471" [ 1041.885861] env[62914]: _type = "Task" [ 1041.885861] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.896661] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.916482] env[62914]: DEBUG oslo_vmware.api [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246554} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.916875] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.917079] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1041.917360] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.952049] env[62914]: INFO nova.scheduler.client.report [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocations for instance 60169fa7-3266-4105-b17b-f3677ed2c443 [ 1041.981557] env[62914]: DEBUG oslo_vmware.api [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Task: {'id': task-4832470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245671} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.982064] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.982432] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1041.982743] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.983097] env[62914]: INFO nova.compute.manager [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1041.983524] env[62914]: DEBUG oslo.service.loopingcall [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.983837] env[62914]: DEBUG nova.compute.manager [-] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1041.983988] env[62914]: DEBUG nova.network.neutron [-] [instance: aedc785f-619f-4b9f-850f-790f84e57577] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1042.082197] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.083639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.438s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.087176] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.821s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.088753] env[62914]: INFO nova.compute.claims [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.135680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-562f8be8-0367-44cc-898e-d176a5444cb8 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.699s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.191686] env[62914]: DEBUG nova.network.neutron [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updated VIF entry in instance network info cache for port c64b5774-e946-4217-a170-f93b64d5070b. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1042.191686] env[62914]: DEBUG nova.network.neutron [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updating instance_info_cache with network_info: [{"id": "c64b5774-e946-4217-a170-f93b64d5070b", "address": "fa:16:3e:c6:f5:e1", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": null, "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc64b5774-e9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.399316] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832471, 'name': PowerOffVM_Task, 'duration_secs': 0.457594} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.399863] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1042.401093] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.401764] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.406016] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 
tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.406016] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.406016] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.406016] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1042.410040] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.410040] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.410040] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.410040] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.410040] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.413217] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cc2b4ee-d36a-47d3-b244-56a9418e01b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.429379] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1042.429379] env[62914]: value = "task-4832472" [ 1042.429379] env[62914]: _type = "Task" [ 1042.429379] env[62914]: } to 
complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.438693] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832472, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.457353] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.672534] env[62914]: INFO nova.scheduler.client.report [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocation for migration 8803623a-ecb1-4ba5-be89-a9d239df5334 [ 1042.694248] env[62914]: DEBUG oslo_concurrency.lockutils [req-9d6683ad-add6-463c-8411-8b8ae48df809 req-8d03b023-480d-4a01-98fe-f2b906d0b233 service nova] Releasing lock "refresh_cache-60169fa7-3266-4105-b17b-f3677ed2c443" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.883018] env[62914]: DEBUG nova.compute.manager [req-81640bbe-a762-41ee-9604-5394ea5b161e req-6f5f8140-3798-4a06-ad74-274879d65dec service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Received event network-vif-deleted-24c487f8-b730-47b7-8817-5b3894271b0f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1042.883018] env[62914]: INFO nova.compute.manager [req-81640bbe-a762-41ee-9604-5394ea5b161e req-6f5f8140-3798-4a06-ad74-274879d65dec service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Neutron deleted interface 24c487f8-b730-47b7-8817-5b3894271b0f; detaching it from the instance and deleting it from the info cache [ 1042.883018] env[62914]: DEBUG nova.network.neutron [req-81640bbe-a762-41ee-9604-5394ea5b161e req-6f5f8140-3798-4a06-ad74-274879d65dec service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.942099] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832472, 'name': ReconfigVM_Task, 'duration_secs': 0.260418} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.943196] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34338f93-98b2-46c2-8557-f20e03c76c93 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.970096] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.970389] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.973022] env[62914]: DEBUG nova.virt.hardware [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.973022] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8b8403e-c0b9-4dbd-9e21-c1e7e3157a44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.980819] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1042.980819] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c69ba9-7a69-8ce5-ed2c-56ac4b858026" [ 1042.980819] env[62914]: _type = "Task" [ 1042.980819] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.991703] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c69ba9-7a69-8ce5-ed2c-56ac4b858026, 'name': SearchDatastore_Task, 'duration_secs': 0.008498} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.998014] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1042.998253] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63db6ded-e16c-41a3-b2dd-17e3c8097afa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.022342] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1043.022342] env[62914]: value = "task-4832473" [ 1043.022342] env[62914]: _type = "Task" [ 1043.022342] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.030239] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832473, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.143679] env[62914]: DEBUG nova.network.neutron [-] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.179470] env[62914]: DEBUG oslo_concurrency.lockutils [None req-02283e75-fd77-4757-a65b-64d8ceb05c20 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.658s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.382902] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94ec645e-b82c-42bb-bf89-b1c29b140402 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.393480] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7213d8ae-3b7c-4bd8-9827-e1433da9dabb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.406439] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57471e0e-4947-4276-a66b-3c7e7854cbaf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.414900] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd0184c-ed15-43a3-9991-2fbe088c7e29 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.460119] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f31ff0e-db56-4fe3-b99c-79a3b8d2363d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.463099] env[62914]: DEBUG nova.compute.manager [req-81640bbe-a762-41ee-9604-5394ea5b161e req-6f5f8140-3798-4a06-ad74-274879d65dec service nova] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Detach interface failed, port_id=24c487f8-b730-47b7-8817-5b3894271b0f, reason: Instance aedc785f-619f-4b9f-850f-790f84e57577 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1043.469420] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e26ef7-a3bd-4256-b718-43e1f3509bec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.487552] env[62914]: DEBUG nova.compute.provider_tree [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.531391] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832473, 'name': ReconfigVM_Task, 'duration_secs': 0.354873} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.531912] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1043.532719] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b36f7a0-4ac2-42ae-97db-a6546933166e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.560226] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.562665] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4aa124c6-32b1-495b-9d7f-55f1c5e146ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.585826] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1043.585826] env[62914]: value = "task-4832474" [ 1043.585826] env[62914]: _type = "Task" [ 1043.585826] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.596519] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832474, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.647828] env[62914]: INFO nova.compute.manager [-] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Took 1.66 seconds to deallocate network for instance. [ 1043.804487] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "60169fa7-3266-4105-b17b-f3677ed2c443" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.858297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.859622] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.990572] env[62914]: DEBUG nova.scheduler.client.report [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.079699] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.080015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.080252] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock 
"120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.080499] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.080735] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.083692] env[62914]: INFO nova.compute.manager [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Terminating instance [ 1044.086297] env[62914]: DEBUG nova.compute.manager [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1044.086574] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1044.087798] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159ac324-94e5-4dd0-89b1-d9d9970841a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.099908] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1044.102726] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2dfc0226-dfbc-48e3-b735-b5d6d0c447e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.104494] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832474, 'name': ReconfigVM_Task, 'duration_secs': 0.47401} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.105224] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03/ec73b924-e132-44b6-bc67-2b3c08592f03.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.106145] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c43ae1-7cc9-40b7-adbb-e77148ae4a04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.112154] env[62914]: DEBUG oslo_vmware.api [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1044.112154] env[62914]: value = "task-4832475" [ 1044.112154] env[62914]: _type = "Task" [ 1044.112154] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.135638] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d8395c-f52f-45e4-b37c-3c513dcc7718 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.142388] env[62914]: DEBUG oslo_vmware.api [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832475, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.162769] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.163797] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed44b2a-baff-485e-a5ac-2eda8a8fe18f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.187463] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2fa27f-2108-4c45-8bdd-32a33e6816d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.195584] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1044.195908] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54dfde44-3162-4c08-9aed-9593b685631b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.204278] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1044.204278] env[62914]: value = "task-4832476" [ 1044.204278] env[62914]: _type = "Task" [ 1044.204278] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.213873] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832476, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.362455] env[62914]: DEBUG nova.compute.utils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.499273] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.500032] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1044.503389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.213s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.505758] env[62914]: INFO nova.compute.claims [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1044.609083] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "18329e67-719b-4609-83de-7db2c4096781" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.609309] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "18329e67-719b-4609-83de-7db2c4096781" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.622840] env[62914]: DEBUG oslo_vmware.api [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832475, 'name': PowerOffVM_Task, 'duration_secs': 0.19945} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.623142] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1044.623318] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1044.623575] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a8f782e-ff7f-4e0a-8676-5ee7258369d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.691009] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1044.691275] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1044.691463] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore2] 120fa16e-60cd-4326-b6c4-f1df419dbcb7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.691731] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dabe7a52-5045-4e57-b006-485511b9590e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.698301] env[62914]: DEBUG oslo_vmware.api [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1044.698301] env[62914]: value = "task-4832478" [ 1044.698301] env[62914]: _type = "Task" [ 1044.698301] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.707151] env[62914]: DEBUG oslo_vmware.api [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832478, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.715528] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832476, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.765543] env[62914]: INFO nova.compute.manager [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Rescuing [ 1044.765886] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.766113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.766340] env[62914]: DEBUG nova.network.neutron [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1044.866769] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.907577] env[62914]: DEBUG nova.compute.manager [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Received event network-changed-689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1044.907795] env[62914]: DEBUG nova.compute.manager [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Refreshing instance network info cache due to event network-changed-689aba7f-31af-4116-8b4e-bcec10c9c5ba. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1044.908015] env[62914]: DEBUG oslo_concurrency.lockutils [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] Acquiring lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.908164] env[62914]: DEBUG oslo_concurrency.lockutils [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] Acquired lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.908328] env[62914]: DEBUG nova.network.neutron [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Refreshing network info cache for port 689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1045.012291] env[62914]: DEBUG nova.compute.utils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1045.016339] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1045.016530] env[62914]: DEBUG nova.network.neutron [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1045.065203] env[62914]: DEBUG nova.policy [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493014f3d66341759a8e03a7878d0af8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78ce97bf0a6a4b65b3cd1e316989a1ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1045.111960] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1045.209584] env[62914]: DEBUG oslo_vmware.api [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141858} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.212849] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.213052] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1045.213247] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1045.213429] env[62914]: INFO nova.compute.manager [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1045.213717] env[62914]: DEBUG oslo.service.loopingcall [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.213909] env[62914]: DEBUG nova.compute.manager [-] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1045.214035] env[62914]: DEBUG nova.network.neutron [-] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1045.221029] env[62914]: DEBUG oslo_vmware.api [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832476, 'name': PowerOnVM_Task, 'duration_secs': 0.653061} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.221327] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1045.462430] env[62914]: DEBUG nova.network.neutron [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Successfully created port: ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.517845] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1045.642634] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.654130] env[62914]: DEBUG nova.network.neutron [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updating instance_info_cache with network_info: [{"id": "f752f060-cdfa-4b16-904d-9263dfa26442", "address": "fa:16:3e:df:33:35", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752f060-cd", "ovs_interfaceid": "f752f060-cdfa-4b16-904d-9263dfa26442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.646920] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.647270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.647415] env[62914]: INFO nova.compute.manager [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Attaching volume 1781e247-2b2e-4cd0-b9da-898a7d3844dd to /dev/sdb [ 1046.651876] env[62914]: DEBUG nova.network.neutron [-] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.653100] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.657950] env[62914]: DEBUG nova.compute.manager [req-56a28897-3a6f-4754-94d4-d3b1c53b7d49 req-2d8d8d7b-75e2-4f7d-ada1-121842792dc9 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Received event network-vif-deleted-2295762d-8e27-469d-a292-9ef453b210d6 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1046.658138] env[62914]: INFO nova.compute.manager [req-56a28897-3a6f-4754-94d4-d3b1c53b7d49 req-2d8d8d7b-75e2-4f7d-ada1-121842792dc9 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Neutron deleted interface 2295762d-8e27-469d-a292-9ef453b210d6; detaching it from the instance and deleting it from the info cache [ 1046.658316] env[62914]: DEBUG nova.network.neutron [req-56a28897-3a6f-4754-94d4-d3b1c53b7d49 req-2d8d8d7b-75e2-4f7d-ada1-121842792dc9 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.699218] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30132175-7801-4765-8a59-007d0fd40007 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.702511] env[62914]: INFO nova.compute.manager [None req-fa6af39d-a79c-4f34-851c-da8c13702684 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance to original state: 'active' [ 1046.717918] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29af7fcb-c1e1-4e27-99da-4acf9bf7c093 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.735112] env[62914]: DEBUG nova.virt.block_device [None 
req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updating existing volume attachment record: e7c6065e-464f-4b04-b4a5-dfb81c705064 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1046.741067] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8603074-3062-4545-90aa-7ce1e35f7a01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.749601] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ec639a-b486-401d-b384-985b76853d85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.783612] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2310a3-f305-46d6-ac0a-e544e29d9778 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.793275] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef0ee0d-5c95-4542-872a-6b9886b87414 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.808821] env[62914]: DEBUG nova.compute.provider_tree [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.892246] env[62914]: DEBUG nova.network.neutron [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updated VIF entry in instance network info cache for port 689aba7f-31af-4116-8b4e-bcec10c9c5ba. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.892622] env[62914]: DEBUG nova.network.neutron [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updating instance_info_cache with network_info: [{"id": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "address": "fa:16:3e:e8:5c:f2", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap689aba7f-31", "ovs_interfaceid": "689aba7f-31af-4116-8b4e-bcec10c9c5ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.160994] env[62914]: INFO nova.compute.manager [-] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Took 1.95 seconds to deallocate network for instance. [ 1047.162371] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1047.166923] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63d18af9-46e3-4aa9-8f0d-bc4affb3d4d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.183610] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ad37ff-c576-4bcd-9aa5-2bf480739ffe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.200454] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1047.200570] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1035a73-ce87-4a28-ad87-6a3c20006489 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.205829] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1047.206041] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1047.206191] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.206390] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1047.206543] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Image pref 0:0:0 
{{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.206835] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1047.206919] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1047.207104] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1047.207299] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1047.207480] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1047.207690] env[62914]: DEBUG nova.virt.hardware [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1047.208583] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37c1088-50dd-40c4-9a15-989da8fb3b05 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.230263] env[62914]: DEBUG nova.compute.manager [req-56a28897-3a6f-4754-94d4-d3b1c53b7d49 req-2d8d8d7b-75e2-4f7d-ada1-121842792dc9 service nova] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Detach interface failed, port_id=2295762d-8e27-469d-a292-9ef453b210d6, reason: Instance 120fa16e-60cd-4326-b6c4-f1df419dbcb7 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1047.230871] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1047.230871] env[62914]: value = "task-4832482" [ 1047.230871] env[62914]: _type = "Task" [ 1047.230871] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.243171] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c461c1-9dc6-47f2-a102-468966e486e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.251221] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.312664] env[62914]: DEBUG nova.scheduler.client.report [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1047.395888] env[62914]: DEBUG oslo_concurrency.lockutils [req-853cfd19-63df-4458-b2f1-718c453fba28 req-7951fa0f-f1f8-41ba-a7bb-16a64c121406 service nova] Releasing lock "refresh_cache-3b26b5d7-524a-41af-ab75-a158568e031e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.675459] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.678599] env[62914]: DEBUG nova.compute.manager [req-7bec40e2-0447-47f5-8b84-0e286158e27c req-c3f73f48-c91a-4b12-b67a-1f0c0bbdd619 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Received event network-vif-plugged-ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1047.678972] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bec40e2-0447-47f5-8b84-0e286158e27c req-c3f73f48-c91a-4b12-b67a-1f0c0bbdd619 service nova] Acquiring lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.679346] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bec40e2-0447-47f5-8b84-0e286158e27c req-c3f73f48-c91a-4b12-b67a-1f0c0bbdd619 service nova] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.679653] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bec40e2-0447-47f5-8b84-0e286158e27c req-c3f73f48-c91a-4b12-b67a-1f0c0bbdd619 
service nova] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.679960] env[62914]: DEBUG nova.compute.manager [req-7bec40e2-0447-47f5-8b84-0e286158e27c req-c3f73f48-c91a-4b12-b67a-1f0c0bbdd619 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] No waiting events found dispatching network-vif-plugged-ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1047.680285] env[62914]: WARNING nova.compute.manager [req-7bec40e2-0447-47f5-8b84-0e286158e27c req-c3f73f48-c91a-4b12-b67a-1f0c0bbdd619 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Received unexpected event network-vif-plugged-ac958614-7d81-4f75-b8a8-abc5e626a182 for instance with vm_state building and task_state spawning. [ 1047.751534] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832482, 'name': PowerOffVM_Task, 'duration_secs': 0.237566} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.751714] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1047.752555] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7cc07c-8e08-4aba-a8c6-7db386757dc7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.773520] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b096c16-c3e3-4a88-9eab-bba62e36396f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.814213] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1047.814527] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5479dcad-1e62-4cdb-8603-d374f5cafe63 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.819506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.316s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.820116] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1047.825169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.192s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.826645] env[62914]: INFO nova.compute.claims [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1047.829219] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1047.829219] env[62914]: value = "task-4832483" [ 1047.829219] env[62914]: _type = "Task" [ 1047.829219] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.839470] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1047.839698] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.840079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.840304] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.840554] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.840898] env[62914]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-70274970-581a-45c1-af65-9b723eff2805 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.852121] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.852121] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1047.852615] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bc0aa2b-d528-4e19-bac2-b4f03f7a792a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.859020] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1047.859020] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a39c53-b0bd-0c19-435a-26e8c760ded2" [ 1047.859020] env[62914]: _type = "Task" [ 1047.859020] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.867611] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a39c53-b0bd-0c19-435a-26e8c760ded2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.911307] env[62914]: DEBUG nova.network.neutron [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Successfully updated port: ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.332145] env[62914]: DEBUG nova.compute.utils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1048.337481] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1048.337713] env[62914]: DEBUG nova.network.neutron [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1048.371772] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a39c53-b0bd-0c19-435a-26e8c760ded2, 'name': SearchDatastore_Task, 'duration_secs': 0.011214} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.372081] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bf87ee0-b213-4a6b-94c9-02182412c4c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.378787] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1048.378787] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528df1f5-fadd-7ba4-d9e3-ef9a457c0924" [ 1048.378787] env[62914]: _type = "Task" [ 1048.378787] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.387577] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528df1f5-fadd-7ba4-d9e3-ef9a457c0924, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.413948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "refresh_cache-68a77363-c25b-426e-86e2-fa31fc6f0ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.414156] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "refresh_cache-68a77363-c25b-426e-86e2-fa31fc6f0ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.414339] env[62914]: DEBUG nova.network.neutron [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1048.418062] env[62914]: DEBUG nova.policy [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4f1342629ac4aee802a2b69a5459827', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ae1b7abf6f24eccb2b44d82687deb76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1048.540851] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.541365] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.541621] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.541887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 
tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.542112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.544376] env[62914]: INFO nova.compute.manager [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Terminating instance [ 1048.546486] env[62914]: DEBUG nova.compute.manager [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1048.546658] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1048.546914] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5dc347ad-84ba-4801-88a7-80e929cfdb19 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.557136] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1048.557136] env[62914]: value = "task-4832484" [ 1048.557136] env[62914]: _type = "Task" [ 1048.557136] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.566207] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832484, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.845502] env[62914]: DEBUG nova.network.neutron [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Successfully created port: 57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.848725] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1048.894140] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528df1f5-fadd-7ba4-d9e3-ef9a457c0924, 'name': SearchDatastore_Task, 'duration_secs': 0.0236} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.897781] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.898173] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. {{(pid=62914) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1048.898737] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f71f6144-6a2c-44b3-b5b0-ea64d171413f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.906550] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1048.906550] env[62914]: value = "task-4832485" [ 1048.906550] env[62914]: _type = "Task" [ 1048.906550] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.923325] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832485, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.967933] env[62914]: DEBUG nova.network.neutron [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1049.069734] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832484, 'name': PowerOffVM_Task, 'duration_secs': 0.476159} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.070491] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1049.070856] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1049.070945] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942013', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'name': 'volume-864a42ed-47df-4ae7-ace0-224fba823a1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'ec73b924-e132-44b6-bc67-2b3c08592f03', 'attached_at': '2025-11-25T11:30:47.000000', 'detached_at': '', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'serial': '864a42ed-47df-4ae7-ace0-224fba823a1f'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1049.075048] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fea2def-e70d-4a2a-9d25-6f40e06850bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.101982] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31632d10-f5a9-4b6c-b52f-e8ae5e54421d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.110610] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ac5b42-9a4d-472e-aaae-52bd9ad7948d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.135821] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d51789-731f-4412-afe1-f84a69c09a67 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.156952] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] The volume has not been displaced from its original location: [datastore2] volume-864a42ed-47df-4ae7-ace0-224fba823a1f/volume-864a42ed-47df-4ae7-ace0-224fba823a1f.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1049.162720] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1049.165750] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64b1d496-6322-407e-80a2-27d666c77d3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.187770] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1049.187770] env[62914]: value = "task-4832487" [ 1049.187770] env[62914]: _type = "Task" [ 1049.187770] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.200517] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832487, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.235415] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e655a904-9835-4bbc-a791-98611ca74d43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.244170] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efb8506-09aa-495b-95e3-6ed0db792366 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.280162] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398c7729-761a-4960-9ad9-c5ea443e5449 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.289599] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92b44b2-b6ce-40d3-abac-09fe3d95f3fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.313846] env[62914]: DEBUG nova.compute.provider_tree [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.413584] env[62914]: DEBUG nova.network.neutron [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Updating instance_info_cache with network_info: [{"id": "ac958614-7d81-4f75-b8a8-abc5e626a182", "address": "fa:16:3e:70:92:34", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac958614-7d", "ovs_interfaceid": "ac958614-7d81-4f75-b8a8-abc5e626a182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.422187] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832485, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.698688] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832487, 'name': ReconfigVM_Task, 'duration_secs': 0.381771} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.699017] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1049.705155] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5762073-76d6-4193-bbc7-fc0a869d8865 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.716806] env[62914]: DEBUG nova.compute.manager [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Received event network-changed-ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1049.717057] env[62914]: DEBUG nova.compute.manager [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Refreshing instance network info cache due to event network-changed-ac958614-7d81-4f75-b8a8-abc5e626a182. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1049.717296] env[62914]: DEBUG oslo_concurrency.lockutils [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] Acquiring lock "refresh_cache-68a77363-c25b-426e-86e2-fa31fc6f0ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.723951] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1049.723951] env[62914]: value = "task-4832488" [ 1049.723951] env[62914]: _type = "Task" [ 1049.723951] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.734990] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832488, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.814310] env[62914]: DEBUG nova.scheduler.client.report [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1049.864786] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1049.890276] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1049.890537] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1049.890699] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.890886] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1049.891048] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.891207] env[62914]: DEBUG 
nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1049.891415] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1049.891576] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1049.891794] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1049.892089] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1049.892345] env[62914]: DEBUG nova.virt.hardware [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1049.893216] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775b8000-62bf-4899-9e61-64777e7b0185 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.901690] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc8d48a-7d06-46bd-9db1-aa59d29189be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.918147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "refresh_cache-68a77363-c25b-426e-86e2-fa31fc6f0ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.918433] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Instance network_info: |[{"id": "ac958614-7d81-4f75-b8a8-abc5e626a182", "address": "fa:16:3e:70:92:34", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": 
"tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac958614-7d", "ovs_interfaceid": "ac958614-7d81-4f75-b8a8-abc5e626a182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1049.918719] env[62914]: DEBUG oslo_concurrency.lockutils [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] Acquired lock "refresh_cache-68a77363-c25b-426e-86e2-fa31fc6f0ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.918902] env[62914]: DEBUG nova.network.neutron [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Refreshing network info cache for port ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1049.920305] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:92:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78b49840-c3fc-455c-8491-a253ccd92bb5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac958614-7d81-4f75-b8a8-abc5e626a182', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.927484] env[62914]: DEBUG oslo.service.loopingcall [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1049.930478] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1049.934194] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9233806-6996-4deb-87ca-9c88ee0742be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.948618] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742286} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.948890] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. [ 1049.950059] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752df188-16fa-49bb-b14e-b947f0ce3370 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.954224] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.954224] env[62914]: value = "task-4832489" [ 1049.954224] env[62914]: _type = "Task" [ 1049.954224] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.977937] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.983576] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d951d02-41c1-4243-85e0-46de4e891329 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.997435] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832489, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.003414] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1050.003414] env[62914]: value = "task-4832490" [ 1050.003414] env[62914]: _type = "Task" [ 1050.003414] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.011904] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832490, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.201305] env[62914]: DEBUG nova.network.neutron [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Updated VIF entry in instance network info cache for port ac958614-7d81-4f75-b8a8-abc5e626a182. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1050.201705] env[62914]: DEBUG nova.network.neutron [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Updating instance_info_cache with network_info: [{"id": "ac958614-7d81-4f75-b8a8-abc5e626a182", "address": "fa:16:3e:70:92:34", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac958614-7d", "ovs_interfaceid": "ac958614-7d81-4f75-b8a8-abc5e626a182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.233590] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832488, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.321621] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.322247] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1050.324971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.842s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.325229] env[62914]: DEBUG nova.objects.instance [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lazy-loading 'resources' on Instance uuid 4911baea-15df-46db-be11-fcf998eb0cb6 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.466445] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832489, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.514970] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832490, 'name': ReconfigVM_Task, 'duration_secs': 0.377441} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.514970] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Reconfigured VM instance instance-0000005f to attach disk [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.514970] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ceb535c-a4ec-47f0-b3f2-a15b7228665b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.542752] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0acb51a8-ac27-408b-9631-d26e29407722 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.559605] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1050.559605] env[62914]: value = "task-4832491" [ 1050.559605] env[62914]: _type = "Task" [ 1050.559605] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.568018] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832491, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.704702] env[62914]: DEBUG oslo_concurrency.lockutils [req-8cae7f81-135e-4fdc-9396-e67447d0752c req-e176752b-d740-496a-9948-6c5adc66c5f6 service nova] Releasing lock "refresh_cache-68a77363-c25b-426e-86e2-fa31fc6f0ee1" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.734487] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832488, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.828609] env[62914]: DEBUG nova.compute.utils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1050.833751] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1050.833949] env[62914]: DEBUG nova.network.neutron [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1050.953370] env[62914]: DEBUG nova.policy [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1050.964781] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832489, 'name': CreateVM_Task, 'duration_secs': 0.694558} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.964999] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1050.966666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.966666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.966666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1050.966666] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b714673-c314-4456-84db-9ded6f37cec0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.974855] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1050.974855] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521b4c13-b37d-677c-4aed-68b961340ca0" [ 1050.974855] env[62914]: _type = "Task" [ 1050.974855] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.986261] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521b4c13-b37d-677c-4aed-68b961340ca0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.072861] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832491, 'name': ReconfigVM_Task, 'duration_secs': 0.18276} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.073236] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1051.073515] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a26cecfd-92a2-46b0-91e4-1b64a6afa179 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.084340] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1051.084340] env[62914]: value = "task-4832492" [ 1051.084340] env[62914]: _type = "Task" [ 1051.084340] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.095179] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.134760] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84810bb0-2ead-435d-b8e2-b0404e7071e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.142886] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872b4a78-de22-4cd7-b476-07803adca8c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.176750] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613d178c-0118-4af8-bfae-e4b555973191 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.184848] env[62914]: DEBUG nova.network.neutron [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Successfully updated port: 57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1051.189840] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b39385d-8df2-4f2c-917c-d47600fdad43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.208321] env[62914]: DEBUG nova.compute.provider_tree [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.235510] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832488, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.291661] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Volume attach. Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1051.291993] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942044', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'name': 'volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '06e8b438-01ef-481f-8e27-2faa01bb97aa', 'attached_at': '', 'detached_at': '', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'serial': '1781e247-2b2e-4cd0-b9da-898a7d3844dd'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1051.292909] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a9209e-ecf8-42d8-98cc-140e9c0eb9c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.312464] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c3570e-6754-4f34-ba1d-ff7be3a1c055 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.341437] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd/volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1051.342241] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1051.345041] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cea24ea9-5af2-4c26-b7f6-ba241c326727 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.367430] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1051.367430] env[62914]: value = "task-4832493" [ 1051.367430] env[62914]: _type = "Task" [ 1051.367430] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.380425] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832493, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.387537] env[62914]: DEBUG nova.network.neutron [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Successfully created port: 5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1051.488571] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521b4c13-b37d-677c-4aed-68b961340ca0, 'name': SearchDatastore_Task, 'duration_secs': 0.022232} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.488571] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.488681] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1051.489355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.489355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.489514] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.489825] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4ce1c2e-7d92-41db-ae10-a7991f4fed51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.500962] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.501224] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1051.502139] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb1cb309-9de0-42bf-9013-eaa1f9db9fb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.509066] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1051.509066] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52981522-f81e-efe4-1971-bd4370d8f49f" [ 1051.509066] env[62914]: _type = "Task" [ 1051.509066] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.520196] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52981522-f81e-efe4-1971-bd4370d8f49f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.594691] env[62914]: DEBUG oslo_vmware.api [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832492, 'name': PowerOnVM_Task, 'duration_secs': 0.4931} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.595009] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1051.597897] env[62914]: DEBUG nova.compute.manager [None req-6b3ccc09-c45f-4883-8d4e-65287b2c6266 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1051.598755] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7e61bc-cb61-4513-88e8-537ad882f936 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.699154] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.699319] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.699478] 
env[62914]: DEBUG nova.network.neutron [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1051.711262] env[62914]: DEBUG nova.scheduler.client.report [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1051.737259] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832488, 'name': ReconfigVM_Task, 'duration_secs': 1.856843} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.741750] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942013', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'name': 'volume-864a42ed-47df-4ae7-ace0-224fba823a1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'ec73b924-e132-44b6-bc67-2b3c08592f03', 'attached_at': '2025-11-25T11:30:47.000000', 'detached_at': '', 'volume_id': '864a42ed-47df-4ae7-ace0-224fba823a1f', 'serial': '864a42ed-47df-4ae7-ace0-224fba823a1f'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1051.741750] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1051.743892] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654e8f23-410e-4df6-b156-d9805cff806d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.755686] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1051.756182] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bc41da8-e208-45c0-88d5-423fadfb7c83 {{(pid=62914) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.830975] env[62914]: DEBUG nova.compute.manager [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Received event network-vif-plugged-57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1051.831372] env[62914]: DEBUG oslo_concurrency.lockutils [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] Acquiring lock "b285198b-aa95-4dcb-99b3-531d09c210d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.831754] env[62914]: DEBUG oslo_concurrency.lockutils [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.832102] env[62914]: DEBUG oslo_concurrency.lockutils [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.832322] env[62914]: DEBUG nova.compute.manager [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] No waiting events found dispatching network-vif-plugged-57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1051.832514] env[62914]: WARNING nova.compute.manager [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Received unexpected event network-vif-plugged-57890d0b-660c-4230-8104-4d1ae53eb7ce for instance with vm_state building and task_state spawning. [ 1051.832689] env[62914]: DEBUG nova.compute.manager [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Received event network-changed-57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1051.832857] env[62914]: DEBUG nova.compute.manager [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Refreshing instance network info cache due to event network-changed-57890d0b-660c-4230-8104-4d1ae53eb7ce. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1051.833055] env[62914]: DEBUG oslo_concurrency.lockutils [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] Acquiring lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.834990] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1051.835254] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1051.835453] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleting the datastore file [datastore2] ec73b924-e132-44b6-bc67-2b3c08592f03 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.835745] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-475f5264-607f-4647-89fe-76a06fcfa6c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.845148] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1051.845148] env[62914]: value = "task-4832495" [ 1051.845148] env[62914]: _type = "Task" [ 1051.845148] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.855093] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832495, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.879886] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832493, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.021377] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52981522-f81e-efe4-1971-bd4370d8f49f, 'name': SearchDatastore_Task, 'duration_secs': 0.026087} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.022886] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa93a408-c71d-4f1c-b4b7-df1d321f834e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.029531] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1052.029531] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526fcc3f-e637-377e-acae-f7e705af894e" [ 1052.029531] env[62914]: _type = "Task" [ 1052.029531] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.041968] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526fcc3f-e637-377e-acae-f7e705af894e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.216947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.219347] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.385s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.220027] env[62914]: DEBUG nova.objects.instance [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'resources' on Instance uuid 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.245842] env[62914]: INFO nova.scheduler.client.report [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted allocations for instance 4911baea-15df-46db-be11-fcf998eb0cb6 [ 1052.265242] env[62914]: DEBUG nova.network.neutron [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1052.356039] env[62914]: DEBUG oslo_vmware.api [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406323} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.358673] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.358932] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1052.359632] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1052.359889] env[62914]: INFO nova.compute.manager [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Took 3.81 seconds to destroy the instance on the hypervisor. [ 1052.360163] env[62914]: DEBUG oslo.service.loopingcall [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.360490] env[62914]: DEBUG nova.compute.manager [-] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1052.360595] env[62914]: DEBUG nova.network.neutron [-] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1052.368720] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1052.378998] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832493, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.395567] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1052.395831] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1052.396014] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.396221] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1052.396367] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.396513] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1052.396738] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1052.398180] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1052.398180] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 
tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1052.398180] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1052.398180] env[62914]: DEBUG nova.virt.hardware [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1052.398490] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c5b9b9-3988-400f-a988-3821d63cbbad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.407725] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d795c5d6-50fe-48d8-94b0-c645884487d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.529898] env[62914]: DEBUG nova.network.neutron [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updating instance_info_cache with network_info: [{"id": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "address": "fa:16:3e:b7:90:c9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57890d0b-66", "ovs_interfaceid": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.544024] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526fcc3f-e637-377e-acae-f7e705af894e, 'name': SearchDatastore_Task, 'duration_secs': 0.059178} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.544024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.544533] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 68a77363-c25b-426e-86e2-fa31fc6f0ee1/68a77363-c25b-426e-86e2-fa31fc6f0ee1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1052.545368] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4861516c-cf60-4970-96c3-5dee5bffae74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.555821] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1052.555821] env[62914]: value = "task-4832496" [ 1052.555821] env[62914]: _type = "Task" [ 1052.555821] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.566409] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832496, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.703816] env[62914]: INFO nova.compute.manager [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Unrescuing [ 1052.704147] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.704366] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.704563] env[62914]: DEBUG nova.network.neutron [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1052.723388] env[62914]: DEBUG nova.objects.instance [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'numa_topology' on Instance uuid 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.756947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6175afb3-fd48-45a2-b157-3f809a37a4b9 tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "4911baea-15df-46db-be11-fcf998eb0cb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.583s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.879448] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832493, 'name': ReconfigVM_Task, 'duration_secs': 1.229981} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.879766] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Reconfigured VM instance instance-0000005c to attach disk [datastore1] volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd/volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1052.884908] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c9b63a2-501e-41c5-b7b2-4051ea20d820 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.902179] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1052.902179] env[62914]: value = "task-4832497" [ 1052.902179] env[62914]: _type = "Task" [ 1052.902179] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.916880] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.945460] env[62914]: DEBUG nova.compute.manager [req-71bd0cfb-699b-43c4-851c-201995d25700 req-8843bc97-9975-430d-bcf7-13b4df3474d1 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Received event network-vif-deleted-94d0e4cd-493e-4e41-89dc-b0636889e9d9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1052.945676] env[62914]: INFO nova.compute.manager [req-71bd0cfb-699b-43c4-851c-201995d25700 req-8843bc97-9975-430d-bcf7-13b4df3474d1 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Neutron deleted interface 94d0e4cd-493e-4e41-89dc-b0636889e9d9; detaching it from the instance and deleting it from the info cache [ 1052.945853] env[62914]: DEBUG nova.network.neutron [req-71bd0cfb-699b-43c4-851c-201995d25700 req-8843bc97-9975-430d-bcf7-13b4df3474d1 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.980326] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "10102941-c31a-4ab1-be5a-801520d49fd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.980611] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "10102941-c31a-4ab1-be5a-801520d49fd7" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.980822] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "10102941-c31a-4ab1-be5a-801520d49fd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.981015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "10102941-c31a-4ab1-be5a-801520d49fd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.981234] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "10102941-c31a-4ab1-be5a-801520d49fd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.983536] env[62914]: INFO nova.compute.manager [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Terminating instance [ 1052.985890] env[62914]: DEBUG nova.compute.manager [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1052.986110] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1052.986949] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d66c9a-6577-4169-a52d-379c02b01ce0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.995133] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1052.995425] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7021ceb9-fe61-4dbe-9825-fcc13967d7e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.003748] env[62914]: DEBUG oslo_vmware.api [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 1053.003748] env[62914]: value = "task-4832498" [ 1053.003748] env[62914]: _type = "Task" [ 1053.003748] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.013274] env[62914]: DEBUG oslo_vmware.api [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832498, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.036297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.036685] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Instance network_info: |[{"id": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "address": "fa:16:3e:b7:90:c9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57890d0b-66", "ovs_interfaceid": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1053.037072] env[62914]: DEBUG oslo_concurrency.lockutils [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] Acquired lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.037308] env[62914]: DEBUG nova.network.neutron [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Refreshing network info cache for port 57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1053.038757] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:90:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c9a12d2-469f-4199-bfaa-f791d765deac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57890d0b-660c-4230-8104-4d1ae53eb7ce', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.046797] env[62914]: DEBUG oslo.service.loopingcall [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.047922] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1053.048203] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a17cfb6-d77b-44a7-b0ef-b7037cbed74a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.077133] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.077133] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.077133] env[62914]: value = "task-4832499" [ 1053.077133] env[62914]: _type = "Task" [ 1053.077133] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.085805] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832499, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.226081] env[62914]: DEBUG nova.objects.base [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Object Instance<455965de-816d-4ab2-9d5e-a12b06893e6f> lazy-loaded attributes: resources,numa_topology {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1053.414668] env[62914]: DEBUG oslo_vmware.api [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832497, 'name': ReconfigVM_Task, 'duration_secs': 0.375922} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.415051] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942044', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'name': 'volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '06e8b438-01ef-481f-8e27-2faa01bb97aa', 'attached_at': '', 'detached_at': '', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'serial': '1781e247-2b2e-4cd0-b9da-898a7d3844dd'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1053.422576] env[62914]: DEBUG nova.network.neutron [-] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.452128] env[62914]: DEBUG nova.network.neutron [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updating instance_info_cache with network_info: [{"id": "f752f060-cdfa-4b16-904d-9263dfa26442", "address": "fa:16:3e:df:33:35", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752f060-cd", "ovs_interfaceid": "f752f060-cdfa-4b16-904d-9263dfa26442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.456461] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf6239d0-af17-4c2a-a2dc-4b6eecbf0939 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.469976] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54950057-e5ce-4b2d-8c47-f6602c2a7750 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.482756] env[62914]: DEBUG nova.network.neutron [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 
da2af7d4-f311-444a-aa9f-0744e698defb] Successfully updated port: 5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1053.514025] env[62914]: DEBUG nova.compute.manager [req-71bd0cfb-699b-43c4-851c-201995d25700 req-8843bc97-9975-430d-bcf7-13b4df3474d1 service nova] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Detach interface failed, port_id=94d0e4cd-493e-4e41-89dc-b0636889e9d9, reason: Instance ec73b924-e132-44b6-bc67-2b3c08592f03 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1053.528133] env[62914]: DEBUG oslo_vmware.api [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832498, 'name': PowerOffVM_Task, 'duration_secs': 0.21443} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.528133] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1053.528133] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1053.528133] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b99aaf4-430c-46f0-8e7f-e894f3be2438 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.570910] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb3e81a-3aed-47af-80c6-9093082a2966 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.590153] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832496, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.591855] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f8369b-e5a9-4d9d-838b-3236401bd72e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.601488] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832499, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.602380] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1053.602380] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1053.602380] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleting the datastore file [datastore2] 10102941-c31a-4ab1-be5a-801520d49fd7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.603019] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1279c737-779c-4872-bae4-f256ddf29eb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.635413] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596c1888-dc42-44fa-b4a3-8c2605a02de8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.638498] env[62914]: DEBUG oslo_vmware.api [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for the task: (returnval){ [ 1053.638498] env[62914]: value = "task-4832501" [ 1053.638498] env[62914]: _type = "Task" [ 1053.638498] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.647387] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee20916e-2bd4-4d70-bf9b-734593c6c6c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.657186] env[62914]: DEBUG oslo_vmware.api [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.668698] env[62914]: DEBUG nova.compute.provider_tree [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.926519] env[62914]: INFO nova.compute.manager [-] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Took 1.57 seconds to deallocate network for instance. 
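The "Waiting for the task: (returnval){ ... }" / "Task: {...} progress is N%" pairs that recur above (ReconfigVM_Task, PowerOffVM_Task, CreateVM_Task, DeleteDatastoreFile_Task) are oslo.vmware's task polling loop. The following is a minimal sketch of that pattern against the public oslo.vmware session API; the vCenter host, credentials and the UUID lookup step are placeholders, not values or code taken from this deployment.

```python
# Illustrative sketch only: start a vSphere task and poll it the way the log's
# wait_for_task/_poll_task lines do (api.py:397/434/444). Host and credentials
# are placeholders; error handling is omitted.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Locate the VM by instance UUID (assumed to be what the SearchIndex /
# PropertyCollector invocations above resolve).
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='10102941-c31a-4ab1-be5a-801520d49fd7',
    vmSearch=True, instanceUuid=True)

if vm_refs:
    # Kick off the task, then block while oslo.vmware polls it, emitting the
    # "progress is N%" lines until it completes successfully or raises.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
    session.wait_for_task(task)
```

Nova's vmwareapi driver reaches this through its own vm_util helpers rather than calling invoke_api directly, but the polling recorded in these lines is the same wait_for_task loop.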
[ 1053.961587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.961587] env[62914]: DEBUG nova.objects.instance [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'flavor' on Instance uuid bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.993373] env[62914]: DEBUG nova.compute.manager [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Received event network-vif-plugged-5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1053.993373] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] Acquiring lock "da2af7d4-f311-444a-aa9f-0744e698defb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.993373] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] Lock "da2af7d4-f311-444a-aa9f-0744e698defb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.993373] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] Lock "da2af7d4-f311-444a-aa9f-0744e698defb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.993373] env[62914]: DEBUG nova.compute.manager [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] No waiting events found dispatching network-vif-plugged-5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1053.993373] env[62914]: WARNING nova.compute.manager [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Received unexpected event network-vif-plugged-5c3a4ef5-fd55-4dee-8a2a-9710098285f4 for instance with vm_state building and task_state spawning. 
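The lock acquire/release lines around the external events above (the per-instance "-events" lock, the "refresh_cache-&lt;uuid&gt;" lock) come from oslo.concurrency. Below is a minimal sketch of the two lockutils forms that produce them; the function names and bodies are placeholders, not Nova code.

```python
# Minimal sketch, not Nova's implementation: the oslo.concurrency primitives
# behind the "Acquiring lock ... / acquired by ... / released by ..." lines.
from oslo_concurrency import lockutils

INSTANCE_UUID = 'da2af7d4-f311-444a-aa9f-0744e698defb'


@lockutils.synchronized(INSTANCE_UUID + '-events')
def _pop_event():
    # Runs with the per-instance events lock held; the decorator logs the
    # acquire/release lines with the waited/held timings seen above.
    return None


def refresh_cache():
    # Context-manager form, as used around the "refresh_cache-<uuid>" sections.
    with lockutils.lock('refresh_cache-' + INSTANCE_UUID):
        pass


_pop_event()
refresh_cache()
```

The waited/held durations in the log are emitted by these same helpers, which is why every "Acquiring lock" entry is paired with a later "released" entry.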
[ 1053.993373] env[62914]: DEBUG nova.compute.manager [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Received event network-changed-5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1053.993373] env[62914]: DEBUG nova.compute.manager [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Refreshing instance network info cache due to event network-changed-5c3a4ef5-fd55-4dee-8a2a-9710098285f4. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1053.993373] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] Acquiring lock "refresh_cache-da2af7d4-f311-444a-aa9f-0744e698defb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.993373] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] Acquired lock "refresh_cache-da2af7d4-f311-444a-aa9f-0744e698defb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.993373] env[62914]: DEBUG nova.network.neutron [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Refreshing network info cache for port 5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1053.994442] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-da2af7d4-f311-444a-aa9f-0744e698defb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.021296] env[62914]: DEBUG nova.network.neutron [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updated VIF entry in instance network info cache for port 57890d0b-660c-4230-8104-4d1ae53eb7ce. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1054.021653] env[62914]: DEBUG nova.network.neutron [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updating instance_info_cache with network_info: [{"id": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "address": "fa:16:3e:b7:90:c9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57890d0b-66", "ovs_interfaceid": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.075560] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832496, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.446318} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.078063] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 68a77363-c25b-426e-86e2-fa31fc6f0ee1/68a77363-c25b-426e-86e2-fa31fc6f0ee1.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1054.078063] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.078063] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe00f9d3-5122-4602-a8cb-0a4c0d274a91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.089954] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832499, 'name': CreateVM_Task, 'duration_secs': 0.811137} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.091346] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1054.091741] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1054.091741] env[62914]: value = "task-4832502" [ 1054.091741] env[62914]: _type = "Task" [ 1054.091741] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.092473] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.092647] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.092995] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1054.093337] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e686a33a-40ea-47b3-b519-eb11f946b8c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.105745] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832502, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.107459] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1054.107459] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cf26de-1074-59e8-ca7e-3d90179a3dc6" [ 1054.107459] env[62914]: _type = "Task" [ 1054.107459] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.116929] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cf26de-1074-59e8-ca7e-3d90179a3dc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.149715] env[62914]: DEBUG oslo_vmware.api [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Task: {'id': task-4832501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378519} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.150080] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.150291] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1054.150478] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1054.150661] env[62914]: INFO nova.compute.manager [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1054.150917] env[62914]: DEBUG oslo.service.loopingcall [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.151157] env[62914]: DEBUG nova.compute.manager [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1054.151256] env[62914]: DEBUG nova.network.neutron [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1054.171722] env[62914]: DEBUG nova.scheduler.client.report [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1054.467567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acaee16c-8c2e-4232-9919-0479f48b2306 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.503101] env[62914]: DEBUG nova.objects.instance [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'flavor' on Instance uuid 06e8b438-01ef-481f-8e27-2faa01bb97aa {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.509033] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1054.510075] env[62914]: INFO nova.compute.manager [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Took 0.58 seconds to detach 1 volumes for instance. 
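The inventory record reported a few lines up for provider f2f7a014-852b-4b37-9610-c5761f4b0175 fixes what the scheduler can place against this node. A small sketch of how those numbers translate into usable capacity, assuming placement's usual (total - reserved) * allocation_ratio check; that formula is an assumption here, not something this log states.

```python
# Rough sketch: derive usable capacity from the inventory data logged above.
# The capacity formula is the conventional placement check and is assumed.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'max_unit': 95, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # max_unit still caps what a single instance may claim from this provider.
    print(f"{rc}: capacity={capacity}, per-instance cap={inv['max_unit']}")
```

With these values that works out to 192 VCPU, 196078 MB of RAM and 200 GB of disk, with max_unit still capping any single claim (for example at 95 GB of disk).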
[ 1054.512504] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6881236d-a00b-46da-a0ef-cd78782e54b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.527090] env[62914]: DEBUG oslo_concurrency.lockutils [req-89c2e5b1-508c-4b53-a133-47094ba6f411 req-5d84761a-27c3-4dd7-8501-546b9503a7f4 service nova] Releasing lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.528155] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1054.528155] env[62914]: value = "task-4832503" [ 1054.528155] env[62914]: _type = "Task" [ 1054.528155] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.540693] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832503, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.552538] env[62914]: DEBUG nova.network.neutron [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1054.604307] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832502, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.466981} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.604616] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1054.605475] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2857ffab-2111-4df6-ad99-4a3e4903761d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.617831] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cf26de-1074-59e8-ca7e-3d90179a3dc6, 'name': SearchDatastore_Task, 'duration_secs': 0.115364} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.627600] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.627893] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1054.628159] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.628314] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.628495] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.638042] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 68a77363-c25b-426e-86e2-fa31fc6f0ee1/68a77363-c25b-426e-86e2-fa31fc6f0ee1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.640791] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e3080f9-3817-45aa-a1d8-430e7c7a1b42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.642763] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8876808-2fee-42ef-9704-6c2ab038e771 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.664366] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1054.664366] env[62914]: value = "task-4832504" [ 1054.664366] env[62914]: _type = "Task" [ 
1054.664366] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.673689] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.673919] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1054.678572] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7676bcaf-0640-4116-bad7-d59a09a5b50f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.681588] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.462s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.684213] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832504, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.684758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.676s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.686348] env[62914]: INFO nova.compute.claims [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.692568] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1054.692568] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ab649f-d629-ecec-8d4c-6f0243905e4c" [ 1054.692568] env[62914]: _type = "Task" [ 1054.692568] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.693469] env[62914]: DEBUG nova.network.neutron [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.704882] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ab649f-d629-ecec-8d4c-6f0243905e4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.907594] env[62914]: DEBUG nova.network.neutron [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.009619] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3c2ef21e-90f0-4cf3-901a-c1a99373d025 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.362s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.023492] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.039266] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832503, 'name': PowerOffVM_Task, 'duration_secs': 0.212991} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.039890] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1055.045977] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1055.045977] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96fd4a11-00b2-4d2b-988b-12ec7c6d563a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.067070] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1055.067070] env[62914]: value = "task-4832505" [ 1055.067070] env[62914]: _type = "Task" [ 1055.067070] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.077392] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832505, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.180228] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832504, 'name': ReconfigVM_Task, 'duration_secs': 0.473871} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.180535] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 68a77363-c25b-426e-86e2-fa31fc6f0ee1/68a77363-c25b-426e-86e2-fa31fc6f0ee1.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.181156] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99ca38c8-3fd8-4cf9-9460-4ad9e54623d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.188506] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1055.188506] env[62914]: value = "task-4832506" [ 1055.188506] env[62914]: _type = "Task" [ 1055.188506] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.195295] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b7f73bde-e9c4-41ae-834a-457b79150af7 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 37.601s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.198869] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 13.117s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.199086] env[62914]: INFO nova.compute.manager [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Unshelving [ 1055.201407] env[62914]: DEBUG oslo_concurrency.lockutils [req-e6f2fb85-0561-424d-b8f7-6cf9f2fe6043 req-b3f22a8b-13e9-4861-a0d3-43951161cebf service nova] Releasing lock "refresh_cache-da2af7d4-f311-444a-aa9f-0744e698defb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.209338] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-da2af7d4-f311-444a-aa9f-0744e698defb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.210106] env[62914]: DEBUG nova.network.neutron [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Building network info cache for instance 
{{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1055.212122] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832506, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.218041] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ab649f-d629-ecec-8d4c-6f0243905e4c, 'name': SearchDatastore_Task, 'duration_secs': 0.026714} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.219621] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b08a1b-1ae3-4a8c-a1b7-553583c01f9d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.227997] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1055.227997] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521f1888-b81a-cd60-34b3-eedce462b28f" [ 1055.227997] env[62914]: _type = "Task" [ 1055.227997] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.237750] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521f1888-b81a-cd60-34b3-eedce462b28f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.411180] env[62914]: INFO nova.compute.manager [-] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Took 1.26 seconds to deallocate network for instance. [ 1055.577836] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832505, 'name': ReconfigVM_Task, 'duration_secs': 0.214976} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.578255] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1055.578472] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1055.578752] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7b6863d-c92d-4c80-a34b-1048d8c044eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.586580] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1055.586580] env[62914]: value = "task-4832507" [ 1055.586580] env[62914]: _type = "Task" [ 1055.586580] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.595251] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832507, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.698875] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832506, 'name': Rename_Task, 'duration_secs': 0.248328} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.699200] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1055.699467] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3da8b6c-b15f-4b67-82eb-49dd62a93d81 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.712033] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1055.712033] env[62914]: value = "task-4832508" [ 1055.712033] env[62914]: _type = "Task" [ 1055.712033] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.733902] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832508, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.752991] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521f1888-b81a-cd60-34b3-eedce462b28f, 'name': SearchDatastore_Task, 'duration_secs': 0.01918} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.754096] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.754096] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1055.754096] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29f4fbb7-dad9-4825-af8f-068023a61dc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.763285] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1055.763285] env[62914]: value = "task-4832509" [ 1055.763285] env[62914]: _type = "Task" [ 1055.763285] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.776045] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832509, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.794754] env[62914]: DEBUG nova.network.neutron [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1055.917799] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.065412] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be517b7-26c5-40b8-92ef-bec4f3863a5d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.074086] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df616b12-7f50-4878-b97b-0e866f29e2bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.117141] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2285b2d-97b3-4f5f-8739-903bf5c3c1ad {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.127116] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa28d346-ecc0-4625-9263-79fa6b986bbd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.131626] env[62914]: DEBUG oslo_vmware.api [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832507, 'name': PowerOnVM_Task, 'duration_secs': 0.415747} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.133232] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1056.133480] env[62914]: DEBUG nova.compute.manager [None req-3679a78c-a798-49b7-b33f-75dd1b84f23b tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1056.134593] env[62914]: DEBUG nova.compute.manager [req-b9571637-50b7-4c89-9a80-8d12f5c9c476 req-bec72ad1-b3bc-4268-a87f-0d1ba29289d2 service nova] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Received event network-vif-deleted-4a22be80-3c2e-4a9d-a348-f64b8b0e0ee2 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1056.135749] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e667c4c-d6e7-4db5-9284-ad1bb8ddd642 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.146948] env[62914]: DEBUG nova.compute.provider_tree [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.185206] env[62914]: DEBUG nova.network.neutron [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Updating instance_info_cache with network_info: [{"id": "5c3a4ef5-fd55-4dee-8a2a-9710098285f4", "address": "fa:16:3e:7f:44:6e", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c3a4ef5-fd", "ovs_interfaceid": "5c3a4ef5-fd55-4dee-8a2a-9710098285f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.223416] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 
tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832508, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.242229] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.276456] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832509, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.653608] env[62914]: DEBUG nova.scheduler.client.report [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1056.688348] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-da2af7d4-f311-444a-aa9f-0744e698defb" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.688752] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Instance network_info: |[{"id": "5c3a4ef5-fd55-4dee-8a2a-9710098285f4", "address": "fa:16:3e:7f:44:6e", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c3a4ef5-fd", "ovs_interfaceid": "5c3a4ef5-fd55-4dee-8a2a-9710098285f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1056.689555] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:44:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c3a4ef5-fd55-4dee-8a2a-9710098285f4', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.701026] env[62914]: DEBUG oslo.service.loopingcall [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.701026] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1056.701026] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f31980de-511f-417b-b547-f3edfadd83ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.725628] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832508, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.727160] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.727160] env[62914]: value = "task-4832510" [ 1056.727160] env[62914]: _type = "Task" [ 1056.727160] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.737811] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832510, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.779091] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832509, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.008221} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.779436] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1056.779658] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.779947] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d30398ad-820e-4a90-9bf8-35a40454258c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.790202] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1056.790202] env[62914]: value = "task-4832511" [ 1056.790202] env[62914]: _type = "Task" [ 1056.790202] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.799532] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832511, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.159978] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.160551] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1057.163712] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.707s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.163840] env[62914]: DEBUG nova.objects.instance [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'resources' on Instance uuid 60169fa7-3266-4105-b17b-f3677ed2c443 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.226525] env[62914]: DEBUG oslo_vmware.api [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832508, 'name': PowerOnVM_Task, 'duration_secs': 1.489762} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.226862] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1057.227014] env[62914]: INFO nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Took 10.06 seconds to spawn the instance on the hypervisor. [ 1057.227207] env[62914]: DEBUG nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1057.228231] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a3d225-671e-45b0-a4f4-7ad62c088165 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.240211] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832510, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.300447] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145475} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.300756] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.301660] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb47d57-26cf-4087-b430-19ec3691173a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.327037] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.327037] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81cc95d6-5eb9-4d76-95ee-988e44a74528 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.347384] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.347888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.353546] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1057.353546] env[62914]: value = "task-4832512" [ 1057.353546] env[62914]: _type = "Task" [ 1057.353546] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.366110] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832512, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.666500] env[62914]: DEBUG nova.compute.utils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1057.668164] env[62914]: DEBUG nova.objects.instance [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'numa_topology' on Instance uuid 60169fa7-3266-4105-b17b-f3677ed2c443 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.669090] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1057.669263] env[62914]: DEBUG nova.network.neutron [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1057.724374] env[62914]: DEBUG nova.policy [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '958529ce89d049f3ade8733e57d9f841', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adf406f1352240aba2338e64b8f182b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1057.737680] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832510, 'name': CreateVM_Task, 'duration_secs': 0.713624} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.737889] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1057.738771] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.738771] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.739107] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1057.739375] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf67c13e-00d2-48f4-8c6b-d8a3aadd4c20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.751826] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1057.751826] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529f87cc-97bb-70fe-77a7-e1cc98c8ae23" [ 1057.751826] env[62914]: _type = "Task" [ 1057.751826] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.761940] env[62914]: INFO nova.compute.manager [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Took 21.51 seconds to build instance. [ 1057.771852] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529f87cc-97bb-70fe-77a7-e1cc98c8ae23, 'name': SearchDatastore_Task, 'duration_secs': 0.014989} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.772035] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.772214] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.772565] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.772782] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.773113] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.773433] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-692ea066-0bce-4265-8263-fe99dac8c5d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.783819] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.784187] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1057.785206] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-576dcaf2-c33c-4ca8-b7d6-fe84ff0881c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.791417] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1057.791417] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523d9824-126e-01cb-c144-7dc26ce3f50e" [ 1057.791417] env[62914]: _type = "Task" [ 1057.791417] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.799628] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523d9824-126e-01cb-c144-7dc26ce3f50e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.850759] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1057.874776] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832512, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.172067] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1058.175571] env[62914]: DEBUG nova.objects.base [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Object Instance<60169fa7-3266-4105-b17b-f3677ed2c443> lazy-loaded attributes: resources,numa_topology {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1058.241502] env[62914]: DEBUG nova.network.neutron [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Successfully created port: 8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.265085] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a2069353-0bc3-4dbd-9b40-a60fd96741bc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.030s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.309022] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523d9824-126e-01cb-c144-7dc26ce3f50e, 'name': SearchDatastore_Task, 'duration_secs': 0.035911} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.309022] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb14df0-84a5-4a37-8e33-db9988acf2ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.319169] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1058.319169] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d5b65c-1502-f86c-68a2-c3cba2455246" [ 1058.319169] env[62914]: _type = "Task" [ 1058.319169] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.333287] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d5b65c-1502-f86c-68a2-c3cba2455246, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.365740] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832512, 'name': ReconfigVM_Task, 'duration_secs': 0.547165} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.370212] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Reconfigured VM instance instance-00000064 to attach disk [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.371165] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2d359d7-e68b-4049-972d-8d221a05b9db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.378468] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1058.378468] env[62914]: value = "task-4832513" [ 1058.378468] env[62914]: _type = "Task" [ 1058.378468] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.384741] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.390936] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832513, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.478776] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f780418-25a2-4030-9e87-045b96f64302 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.492068] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311bd172-a5f6-4b27-bde6-89d23b7f9053 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.553912] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e0c037-d906-458c-bd8d-cf48d8fa4b6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.562537] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2823340f-55fe-4933-8de2-f417ff5f5ca4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.582188] env[62914]: DEBUG nova.compute.provider_tree [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.831572] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d5b65c-1502-f86c-68a2-c3cba2455246, 'name': SearchDatastore_Task, 'duration_secs': 0.018796} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.832158] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.832579] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] da2af7d4-f311-444a-aa9f-0744e698defb/da2af7d4-f311-444a-aa9f-0744e698defb.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1058.832978] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45dbe5ba-2f68-4ae3-8bd1-94a23755d512 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.841802] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1058.841802] env[62914]: value = "task-4832514" [ 1058.841802] env[62914]: _type = "Task" [ 1058.841802] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.851861] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832514, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.890777] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832513, 'name': Rename_Task, 'duration_secs': 0.240082} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.891244] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1058.894083] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42963e76-b4b7-45dc-a5f6-ff1ada99139e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.900271] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1058.900271] env[62914]: value = "task-4832515" [ 1058.900271] env[62914]: _type = "Task" [ 1058.900271] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.909884] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832515, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.087176] env[62914]: DEBUG nova.scheduler.client.report [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1059.186373] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1059.213598] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1059.213838] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1059.214009] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.214221] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor pref 0:0:0 
{{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1059.214378] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.214536] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1059.214751] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1059.214916] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1059.215113] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1059.215293] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1059.215474] env[62914]: DEBUG nova.virt.hardware [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1059.216464] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95122963-ba70-4465-b1ff-f35d6867fe84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.226893] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6c658a-31d5-4b90-9d72-564a087da924 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.353308] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832514, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.412101] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832515, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.593238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.429s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.596291] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.434s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.600021] env[62914]: DEBUG nova.objects.instance [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lazy-loading 'resources' on Instance uuid aedc785f-619f-4b9f-850f-790f84e57577 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.657985] env[62914]: DEBUG nova.compute.manager [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1059.659087] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01408383-fcb4-4598-a9b0-99f3a95df013 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.859038] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832514, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.948573} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.859715] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] da2af7d4-f311-444a-aa9f-0744e698defb/da2af7d4-f311-444a-aa9f-0744e698defb.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1059.860045] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.860393] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffa65dbf-1cca-4a2c-951b-eddd6f04b8d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.870176] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1059.870176] env[62914]: value = "task-4832516" [ 1059.870176] env[62914]: _type = "Task" [ 1059.870176] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.883058] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832516, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.911872] env[62914]: DEBUG oslo_vmware.api [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832515, 'name': PowerOnVM_Task, 'duration_secs': 0.864184} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.912216] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1059.912967] env[62914]: INFO nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Took 10.05 seconds to spawn the instance on the hypervisor. 
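The entries around this point are dominated by the oslo.vmware task cycle: a vSphere task such as PowerOnVM_Task, CopyVirtualDisk_Task or ExtendVirtualDisk_Task is invoked, wait_for_task polls it, and _poll_task logs "progress is N%" until the task completes successfully. The snippet below is a minimal sketch of driving that same cycle directly with oslo.vmware; the vCenter host, credentials and VM managed-object ID are placeholders (not values from this deployment), and the use of vim_util.get_moref to build the reference is an assumption for illustration only.

```python
# Illustrative sketch of the PowerOnVM_Task / wait_for_task cycle seen in the
# log above, using oslo.vmware. Connection details and the moref are
# placeholders, not taken from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',           # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder username
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,           # drives the periodic "progress is N%" polling
)

# Assume the VM's managed object ID was looked up elsewhere (e.g. via the
# PropertyCollector.RetrievePropertiesEx calls that appear in the log).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api returns the Task moref; wait_for_task polls it until it reaches a
# terminal state and raises an oslo_vmware exception if the task fails.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.state)  # expected 'success' once the VM is powered on
```

Because wait_for_task only returns once the task reaches a terminal state, the log shows a run of "progress is N%" lines followed by a single "completed successfully" entry per task, which is the pattern visible throughout this section.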
[ 1059.913197] env[62914]: DEBUG nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1059.914623] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7948185e-bf44-4dbe-9983-0ceed10ba26e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.109812] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b6338e74-280b-495d-9370-aee59cab6474 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 50.448s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.110842] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 16.307s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.111081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.111298] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.111471] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.113658] env[62914]: INFO nova.compute.manager [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Terminating instance [ 1060.116139] env[62914]: DEBUG nova.compute.manager [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1060.116355] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1060.116634] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55a147dd-7a6b-4d12-946e-a7350c3d25eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.128083] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bd79e5-c80c-40fc-b0fa-4f78ee96993c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.174554] env[62914]: INFO nova.compute.manager [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] instance snapshotting [ 1060.175460] env[62914]: WARNING nova.virt.vmwareapi.vmops [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 60169fa7-3266-4105-b17b-f3677ed2c443 could not be found. [ 1060.175663] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1060.175845] env[62914]: INFO nova.compute.manager [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1060.176104] env[62914]: DEBUG oslo.service.loopingcall [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1060.177590] env[62914]: DEBUG nova.compute.manager [req-0f607791-81f5-48bd-9c36-ae73f3cf9a0a req-b52da147-3651-4aca-a7cf-7394d9b349f5 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-vif-plugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1060.177590] env[62914]: DEBUG oslo_concurrency.lockutils [req-0f607791-81f5-48bd-9c36-ae73f3cf9a0a req-b52da147-3651-4aca-a7cf-7394d9b349f5 service nova] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.177705] env[62914]: DEBUG oslo_concurrency.lockutils [req-0f607791-81f5-48bd-9c36-ae73f3cf9a0a req-b52da147-3651-4aca-a7cf-7394d9b349f5 service nova] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.177930] env[62914]: DEBUG oslo_concurrency.lockutils [req-0f607791-81f5-48bd-9c36-ae73f3cf9a0a req-b52da147-3651-4aca-a7cf-7394d9b349f5 service nova] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.178126] env[62914]: DEBUG nova.compute.manager [req-0f607791-81f5-48bd-9c36-ae73f3cf9a0a req-b52da147-3651-4aca-a7cf-7394d9b349f5 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] No waiting events found dispatching network-vif-plugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1060.178304] env[62914]: WARNING nova.compute.manager [req-0f607791-81f5-48bd-9c36-ae73f3cf9a0a req-b52da147-3651-4aca-a7cf-7394d9b349f5 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received unexpected event network-vif-plugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 for instance with vm_state building and task_state spawning. 
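The network-vif-plugged entries above illustrate the external-event handshake between the compute service and the network service: a waiter is registered for each event the compute side expects, the event is popped and its waiter signalled when it arrives, and an event with no registered waiter is logged as unexpected (as happens here while the instance is still building). A simplified, self-contained sketch of that pattern, not Nova's actual InstanceEvents implementation:

import threading


class ExpectedEvents:
    # Registry of expected events keyed by (instance_uuid, event_name).

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}

    def prepare_for_event(self, instance_uuid, event_name):
        # Register interest before triggering the external action.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        # Deliver an event: signal its waiter, or report it as unexpected.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return False
        waiter.set()
        return True


# Usage: register first, then the event arrives from the network service.
events = ExpectedEvents()
w = events.prepare_for_event("e730b472", "network-vif-plugged-8d6d259f")
events.pop_event("e730b472", "network-vif-plugged-8d6d259f")
assert w.wait(timeout=1)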
[ 1060.181931] env[62914]: DEBUG nova.network.neutron [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Successfully updated port: 8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.183350] env[62914]: DEBUG nova.compute.manager [-] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1060.183459] env[62914]: DEBUG nova.network.neutron [-] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1060.185608] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04da165-4431-4506-b1f8-e3441a122be3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.211901] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4496e75d-cd45-446e-8c63-6d01283bf149 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.381177] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113502} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.381989] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1060.385315] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf47ced-8a8d-4c97-a95f-3a00e243e2c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.405533] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] da2af7d4-f311-444a-aa9f-0744e698defb/da2af7d4-f311-444a-aa9f-0744e698defb.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.408548] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5027dcf4-72e0-4d85-ab19-5099b016a110 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.430927] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1060.430927] env[62914]: value = "task-4832517" [ 1060.430927] env[62914]: _type = "Task" [ 1060.430927] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.438774] env[62914]: INFO nova.compute.manager [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Took 24.17 seconds to build instance. [ 1060.445895] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832517, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.478313] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa3946d-99f9-4c0b-92c0-f155e5cd3f3b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.486990] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf3b7a8-677e-402e-934c-1ed3ae4bea10 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.526982] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935d6a1a-1fa1-4d70-8001-14d1975b0a13 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.535277] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61aa031b-040e-4214-8bac-423be8e641e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.549369] env[62914]: DEBUG nova.compute.provider_tree [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.689612] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.689779] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.689934] env[62914]: DEBUG nova.network.neutron [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1060.724824] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 
tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1060.725555] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f5ee7ec5-fec8-4ecf-b428-5094a954f5ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.734205] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1060.734205] env[62914]: value = "task-4832518" [ 1060.734205] env[62914]: _type = "Task" [ 1060.734205] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.745136] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832518, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.835568] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.835964] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.943083] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28b9f87d-e75e-43de-8c6e-5c5fb97bbf7e tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.680s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.943469] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832517, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.052946] env[62914]: DEBUG nova.scheduler.client.report [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1061.121725] env[62914]: DEBUG nova.network.neutron [-] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.246225] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832518, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.334970] env[62914]: DEBUG nova.network.neutron [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1061.339281] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1061.443626] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832517, 'name': ReconfigVM_Task, 'duration_secs': 0.953202} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.446349] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Reconfigured VM instance instance-00000065 to attach disk [datastore1] da2af7d4-f311-444a-aa9f-0744e698defb/da2af7d4-f311-444a-aa9f-0744e698defb.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1061.447035] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a71afbf2-5bac-4304-b9ad-569b35e1d20e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.455936] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1061.455936] env[62914]: value = "task-4832519" [ 1061.455936] env[62914]: _type = "Task" [ 1061.455936] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.469661] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832519, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.559119] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.963s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.562688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.921s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.564486] env[62914]: INFO nova.compute.claims [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.605308] env[62914]: INFO nova.scheduler.client.report [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Deleted allocations for instance aedc785f-619f-4b9f-850f-790f84e57577 [ 1061.624264] env[62914]: INFO nova.compute.manager [-] [instance: 60169fa7-3266-4105-b17b-f3677ed2c443] Took 1.44 seconds to deallocate network for instance. 
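The lockutils lines throughout this trace record how long each caller waited to acquire a named lock and how long it held it (for example "compute_resources" waited 15.434s, held 1.963s). A small stdlib-only sketch that reproduces that instrumentation, assuming plain in-process locks rather than oslo.concurrency's external or fair lock variants:

import threading
import time
from contextlib import contextmanager

_locks = {}                        # name -> threading.Lock
_registry_lock = threading.Lock()  # guards the registry itself


def _get_lock(name):
    with _registry_lock:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, caller):
    # Acquire a named lock and log wait/held durations like the lines above.
    lock = _get_lock(name)
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')


# Usage:
with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.01)  # stand-in for the critical section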
[ 1061.747370] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832518, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.766907] env[62914]: DEBUG nova.network.neutron [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.865931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.966179] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832519, 'name': Rename_Task, 'duration_secs': 0.356225} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.966554] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1061.966825] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8228136-7bf5-4082-a6d6-ce8aada6de00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.973856] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1061.973856] env[62914]: value = "task-4832520" [ 1061.973856] env[62914]: _type = "Task" [ 1061.973856] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.982465] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832520, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.114827] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a45c5c48-0c15-4fb0-84ef-14f715847e28 tempest-AttachInterfacesTestJSON-714396526 tempest-AttachInterfacesTestJSON-714396526-project-member] Lock "aedc785f-619f-4b9f-850f-790f84e57577" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.293s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.196624] env[62914]: DEBUG nova.compute.manager [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1062.196624] env[62914]: DEBUG nova.compute.manager [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing instance network info cache due to event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1062.196624] env[62914]: DEBUG oslo_concurrency.lockutils [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.246546] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832518, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.270202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.270683] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Instance network_info: |[{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1062.271061] env[62914]: DEBUG oslo_concurrency.lockutils [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.271239] env[62914]: DEBUG nova.network.neutron [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1062.273016] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:f1:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d6d259f-1ebc-4e49-b6f8-114f414606f7', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.281687] env[62914]: DEBUG oslo.service.loopingcall [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 
tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.284351] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1062.284351] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de092396-3f2f-4376-8ac9-8eaca04a4444 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.303738] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.303738] env[62914]: value = "task-4832521" [ 1062.303738] env[62914]: _type = "Task" [ 1062.303738] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.312947] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832521, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.486442] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832520, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.653400] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6bc399ae-154d-4c30-87e7-9b79384da7bb tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "60169fa7-3266-4105-b17b-f3677ed2c443" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.542s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.750904] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832518, 'name': CreateSnapshot_Task, 'duration_secs': 1.562738} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.751690] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1062.752546] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583d511e-9c7b-4f23-82fa-c483395deada {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.826148] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832521, 'name': CreateVM_Task, 'duration_secs': 0.401533} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.826880] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1062.827211] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.827313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.827605] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1062.827930] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8e7f597-37e5-4625-9d09-778125742b64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.835317] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1062.835317] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ba57a8-a5f1-b9da-4249-f009cf0eb9b4" [ 1062.835317] env[62914]: _type = "Task" [ 1062.835317] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.849470] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ba57a8-a5f1-b9da-4249-f009cf0eb9b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.940210] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7801583d-a6c9-4b50-a6b9-69e310abdcd3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.949672] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fa394c-a270-4ab5-8c91-9227caa74610 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.998133] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b1ded4-5a3e-4cb5-9af8-fa5fdc9878e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.011017] env[62914]: DEBUG oslo_vmware.api [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832520, 'name': PowerOnVM_Task, 'duration_secs': 0.947572} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.011017] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1063.011237] env[62914]: INFO nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Took 10.64 seconds to spawn the instance on the hypervisor. 
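Taken together, the entries above cover the full spawn path for instance da2af7d4-f311-444a-aa9f-0744e698defb: copy the cached image VMDK, extend the root disk, reconfigure the VM to attach it, rename, power on, and report the total as "Took 10.64 seconds to spawn". A hedged outline of that ordering; every helper below is a hypothetical stand-in (the real steps are vCenter tasks driven from nova.virt.vmwareapi), included only to show the sequence:

import time


def copy_cached_vmdk(datastore, image_id, instance_uuid):
    # Stand-in for CopyVirtualDisk_Task from the image cache to the instance folder.
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


def extend_virtual_disk(disk_path, size_kb):
    # Stand-in for ExtendVirtualDisk_Task (size in KB, e.g. 1048576 for 1 GB).
    pass


def attach_disk_to_vm(instance_uuid, disk_path, disk_type="sparse"):
    # Stand-in for ReconfigVM_Task attaching the root disk.
    pass


def rename_and_power_on(instance_uuid):
    # Stand-in for Rename_Task followed by PowerOnVM_Task.
    pass


def spawn_from_cached_image(instance_uuid, image_id, datastore, root_gb):
    start = time.monotonic()
    disk_path = copy_cached_vmdk(datastore, image_id, instance_uuid)
    extend_virtual_disk(disk_path, root_gb * 1024 * 1024)
    attach_disk_to_vm(instance_uuid, disk_path)
    rename_and_power_on(instance_uuid)
    print(f"Took {time.monotonic() - start:.2f} seconds to spawn the instance "
          "on the hypervisor.")


spawn_from_cached_image("da2af7d4-f311-444a-aa9f-0744e698defb",
                        "75c43660-b52b-450e-ba36-0f721e14bc6c",
                        "datastore1", root_gb=1)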
[ 1063.011427] env[62914]: DEBUG nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1063.012314] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1c48af-6fbf-4792-9ace-9d99f701f601 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.017246] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4911018-81f3-4200-a51e-225c399aaf2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.033881] env[62914]: DEBUG nova.compute.provider_tree [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.286124] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1063.286440] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ce35ceff-692f-44ed-92a2-2303453eb530 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.295868] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1063.295868] env[62914]: value = "task-4832522" [ 1063.295868] env[62914]: _type = "Task" [ 1063.295868] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.310861] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.329654] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.330229] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.337657] env[62914]: DEBUG nova.network.neutron [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updated VIF entry in instance network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1063.339708] env[62914]: DEBUG nova.network.neutron [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.356326] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ba57a8-a5f1-b9da-4249-f009cf0eb9b4, 'name': SearchDatastore_Task, 'duration_secs': 0.014272} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.356326] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.356326] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.356625] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.356625] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.356738] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.357348] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57b3a795-1f07-4f9c-bc57-45d496c1b9c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.376264] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.376651] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1063.378371] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5649c158-69e8-463d-bc76-f0516e075faa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.385093] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1063.385093] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233629f-c10a-813d-8384-2f84b0c8876c" [ 1063.385093] env[62914]: _type = "Task" [ 1063.385093] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.393574] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233629f-c10a-813d-8384-2f84b0c8876c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.544254] env[62914]: DEBUG nova.scheduler.client.report [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1063.561909] env[62914]: INFO nova.compute.manager [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Took 25.96 seconds to build instance. [ 1063.812213] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.842640] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.842822] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 1063.842941] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Rebuilding the list of instances to heal {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1063.848804] env[62914]: DEBUG oslo_concurrency.lockutils [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.849546] env[62914]: DEBUG nova.compute.manager [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Received event network-changed-57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1063.849546] env[62914]: DEBUG nova.compute.manager [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Refreshing instance network info cache due to event network-changed-57890d0b-660c-4230-8104-4d1ae53eb7ce. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1063.849546] env[62914]: DEBUG oslo_concurrency.lockutils [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] Acquiring lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.849759] env[62914]: DEBUG oslo_concurrency.lockutils [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] Acquired lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.849759] env[62914]: DEBUG nova.network.neutron [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Refreshing network info cache for port 57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1063.905021] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233629f-c10a-813d-8384-2f84b0c8876c, 'name': SearchDatastore_Task, 'duration_secs': 0.018745} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.905021] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac525dca-7e7b-42e9-ab85-85b76696b5ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.909812] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1063.909812] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b3ac53-fb70-b6b1-4788-913988de4e2c" [ 1063.909812] env[62914]: _type = "Task" [ 1063.909812] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.919668] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b3ac53-fb70-b6b1-4788-913988de4e2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.055087] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.055430] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.056982] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.057477] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1064.060344] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.385s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.061055] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.062668] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.039s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.062878] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.065253] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.148s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.065552] env[62914]: DEBUG nova.objects.instance [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lazy-loading 'resources' on Instance uuid 10102941-c31a-4ab1-be5a-801520d49fd7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.071192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-37fb4965-30e1-4b3f-953c-8401d6b32514 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "da2af7d4-f311-444a-aa9f-0744e698defb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.476s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.096782] env[62914]: INFO nova.scheduler.client.report [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted allocations for instance ec73b924-e132-44b6-bc67-2b3c08592f03 [ 1064.098779] env[62914]: INFO nova.scheduler.client.report [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocations 
for instance 120fa16e-60cd-4326-b6c4-f1df419dbcb7 [ 1064.307907] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.352342] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1064.352602] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 18329e67-719b-4609-83de-7db2c4096781] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1064.397796] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.398046] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.398270] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e061304c-998b-4331-b60d-809916844a6f] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1064.398496] env[62914]: DEBUG nova.objects.instance [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lazy-loading 'info_cache' on Instance uuid e061304c-998b-4331-b60d-809916844a6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.421949] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b3ac53-fb70-b6b1-4788-913988de4e2c, 'name': SearchDatastore_Task, 'duration_secs': 0.021829} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.422382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.422575] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1064.422844] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c0c4f31-e25c-42da-b550-fc9ccaf47d44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.432080] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1064.432080] env[62914]: value = "task-4832523" [ 1064.432080] env[62914]: _type = "Task" [ 1064.432080] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.443104] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832523, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.561243] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1064.572585] env[62914]: DEBUG nova.compute.utils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1064.577184] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1064.577381] env[62914]: DEBUG nova.network.neutron [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1064.595637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "da2af7d4-f311-444a-aa9f-0744e698defb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.595918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "da2af7d4-f311-444a-aa9f-0744e698defb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.596145] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "da2af7d4-f311-444a-aa9f-0744e698defb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.596354] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "da2af7d4-f311-444a-aa9f-0744e698defb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.596528] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "da2af7d4-f311-444a-aa9f-0744e698defb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.603674] env[62914]: INFO nova.compute.manager [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Terminating instance [ 1064.610667] env[62914]: DEBUG nova.compute.manager [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1064.610878] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1064.612445] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cfe6e5-baeb-482e-a335-b3d6ae661215 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.616672] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30df9e5b-0765-49ef-84c5-4d31ce64fd92 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "ec73b924-e132-44b6-bc67-2b3c08592f03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.075s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.617693] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5162f392-cf9b-46cc-9abe-aa10324f3b2c tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "120fa16e-60cd-4326-b6c4-f1df419dbcb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.537s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.625285] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1064.625584] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d64b1d4-8260-4642-ae9b-fe16c4c30c0f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.641692] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1064.641692] env[62914]: value = "task-4832524" [ 1064.641692] env[62914]: _type = "Task" [ 1064.641692] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.655031] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832524, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.662829] env[62914]: DEBUG nova.policy [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e291489da35649d0a2c69f98714d89ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14ea39ac6e2d400ca89bbffc20d764ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1064.793045] env[62914]: DEBUG nova.network.neutron [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updated VIF entry in instance network info cache for port 57890d0b-660c-4230-8104-4d1ae53eb7ce. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1064.793550] env[62914]: DEBUG nova.network.neutron [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updating instance_info_cache with network_info: [{"id": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "address": "fa:16:3e:b7:90:c9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57890d0b-66", "ovs_interfaceid": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.809390] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.946433] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832523, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.961572] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8051261e-2615-4acf-adf4-eaf1208aaf03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.970783] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3581ce3-4074-402f-8338-36e83613e197 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.030255] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6b830b-155a-4e4e-a7d1-94aef3b10ed9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.043964] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63d58ce-697c-4275-9f83-caa621394ad5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.063960] env[62914]: DEBUG nova.compute.provider_tree [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.079902] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1065.084847] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.155023] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832524, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.290218] env[62914]: DEBUG nova.network.neutron [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Successfully created port: 27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1065.299244] env[62914]: DEBUG oslo_concurrency.lockutils [req-e4f38e2d-ae0e-4cbe-be88-e411b651f3da req-f93c19cb-961b-4353-9d19-ecdb7ea97397 service nova] Releasing lock "refresh_cache-b285198b-aa95-4dcb-99b3-531d09c210d0" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.309336] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.447725] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832523, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.829713} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.449120] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1065.449120] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.450258] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-837fda54-22af-4c8d-bc90-73eb57bcb285 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.463300] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1065.463300] env[62914]: value = "task-4832525" [ 1065.463300] env[62914]: _type = "Task" [ 1065.463300] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.475648] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832525, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.569577] env[62914]: DEBUG nova.scheduler.client.report [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1065.656673] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.813379] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.972842] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.247934} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.973187] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.974405] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57369ff-e755-460e-8163-5e612d848ec1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.000136] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.000336] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e418f64b-ee9f-42c3-b30e-e8af29760a9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.025055] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.025844] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.028760] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1066.028760] env[62914]: value = "task-4832526" [ 1066.028760] env[62914]: _type = "Task" [ 1066.028760] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.043277] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832526, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.075424] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.078573] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.836s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.078573] env[62914]: DEBUG nova.objects.instance [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'pci_requests' on Instance uuid 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.092068] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1066.111169] env[62914]: INFO nova.scheduler.client.report [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Deleted allocations for instance 10102941-c31a-4ab1-be5a-801520d49fd7 [ 1066.128683] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1066.128970] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1066.129174] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 
tempest-ServerDiskConfigTestJSON-253234715-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.129412] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1066.129567] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.129718] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1066.129948] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1066.130159] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1066.130344] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1066.130530] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1066.130727] env[62914]: DEBUG nova.virt.hardware [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1066.132026] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4b1d53-b427-4050-936e-7708818baa78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.140795] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c98448-1b7d-4c22-9024-98b08ffeb8c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.154831] env[62914]: DEBUG oslo_vmware.api [None 
req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.302034] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e061304c-998b-4331-b60d-809916844a6f] Updating instance_info_cache with network_info: [{"id": "2de06f63-3449-4e6e-af95-5835f882045b", "address": "fa:16:3e:32:91:c9", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de06f63-34", "ovs_interfaceid": "2de06f63-3449-4e6e-af95-5835f882045b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.315048] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.528984] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1066.544308] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832526, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.583157] env[62914]: DEBUG nova.objects.instance [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'numa_topology' on Instance uuid 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.624036] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3a3c7aa5-3262-49da-9b40-75d09a0801fa tempest-ServersAdminTestJSON-1709758822 tempest-ServersAdminTestJSON-1709758822-project-member] Lock "10102941-c31a-4ab1-be5a-801520d49fd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.643s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.658749] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832524, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.806299] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-e061304c-998b-4331-b60d-809916844a6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.806593] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e061304c-998b-4331-b60d-809916844a6f] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 1066.806860] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.807657] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.807960] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.808243] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.808523] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.808797] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.809056] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 1066.809278] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.814710] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.054296] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832526, 'name': ReconfigVM_Task, 'duration_secs': 0.866211} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.054536] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Reconfigured VM instance instance-00000066 to attach disk [datastore2] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.058538] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76e5803c-eaf2-4d51-8dce-d239d6239689 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.066272] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1067.066272] env[62914]: value = "task-4832527" [ 1067.066272] env[62914]: _type = "Task" [ 1067.066272] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.076994] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832527, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.078462] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.088523] env[62914]: INFO nova.compute.claims [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1067.149593] env[62914]: DEBUG nova.compute.manager [req-74811988-4b21-4774-9dd6-57faacf1a775 req-7d92754a-a8df-4a3d-b8f9-86502d808b53 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Received event network-vif-plugged-27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1067.149840] env[62914]: DEBUG oslo_concurrency.lockutils [req-74811988-4b21-4774-9dd6-57faacf1a775 req-7d92754a-a8df-4a3d-b8f9-86502d808b53 service nova] Acquiring lock "18329e67-719b-4609-83de-7db2c4096781-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.150128] env[62914]: DEBUG oslo_concurrency.lockutils [req-74811988-4b21-4774-9dd6-57faacf1a775 req-7d92754a-a8df-4a3d-b8f9-86502d808b53 service nova] Lock "18329e67-719b-4609-83de-7db2c4096781-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.150428] env[62914]: DEBUG oslo_concurrency.lockutils [req-74811988-4b21-4774-9dd6-57faacf1a775 req-7d92754a-a8df-4a3d-b8f9-86502d808b53 service nova] Lock "18329e67-719b-4609-83de-7db2c4096781-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.150663] env[62914]: DEBUG nova.compute.manager [req-74811988-4b21-4774-9dd6-57faacf1a775 req-7d92754a-a8df-4a3d-b8f9-86502d808b53 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] No waiting events found dispatching network-vif-plugged-27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1067.150880] env[62914]: WARNING nova.compute.manager [req-74811988-4b21-4774-9dd6-57faacf1a775 req-7d92754a-a8df-4a3d-b8f9-86502d808b53 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Received unexpected event network-vif-plugged-27ba2416-757a-4a8b-a7a5-f3d585ce4899 for instance with vm_state building and task_state spawning. [ 1067.164930] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832524, 'name': PowerOffVM_Task, 'duration_secs': 2.282975} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.165258] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1067.165432] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1067.165690] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bf7ae87-886f-403b-887e-0f80f835528c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.235675] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1067.235880] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1067.236126] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore1] da2af7d4-f311-444a-aa9f-0744e698defb {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.236471] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf10552e-467e-4250-90ac-3a070d31b68f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.245977] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1067.245977] env[62914]: value = "task-4832529" [ 1067.245977] env[62914]: _type = "Task" [ 1067.245977] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.256196] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.312979] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.315737] env[62914]: DEBUG nova.network.neutron [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Successfully updated port: 27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1067.321250] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.578745] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832527, 'name': Rename_Task, 'duration_secs': 0.165058} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.581272] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1067.581272] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e591425b-d43c-489b-afac-0255bd149ce2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.586729] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1067.586729] env[62914]: value = "task-4832530" [ 1067.586729] env[62914]: _type = "Task" [ 1067.586729] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.601760] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.763756] env[62914]: DEBUG oslo_vmware.api [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278428} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.763756] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1067.763756] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1067.763756] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1067.763756] env[62914]: INFO nova.compute.manager [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Took 3.15 seconds to destroy the instance on the hypervisor. [ 1067.763924] env[62914]: DEBUG oslo.service.loopingcall [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1067.764038] env[62914]: DEBUG nova.compute.manager [-] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1067.764099] env[62914]: DEBUG nova.network.neutron [-] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1067.815260] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832522, 'name': CloneVM_Task, 'duration_secs': 4.285908} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.817783] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Created linked-clone VM from snapshot [ 1067.819555] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65217e38-1eff-497f-b5ce-c25129aee685 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.824888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "refresh_cache-18329e67-719b-4609-83de-7db2c4096781" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.824888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "refresh_cache-18329e67-719b-4609-83de-7db2c4096781" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.825023] env[62914]: DEBUG nova.network.neutron [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1067.834422] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Uploading image b0b1438d-527f-4f0d-9f93-01fa019ba07f {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1067.853449] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1067.853805] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-18a8212c-98bd-4abe-b2b0-9a3622e5ea1a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.863743] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1067.863743] env[62914]: value = "task-4832531" [ 1067.863743] env[62914]: _type = "Task" [ 1067.863743] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.875645] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832531, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.100312] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832530, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.360715] env[62914]: DEBUG nova.network.neutron [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1068.377598] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832531, 'name': Destroy_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.420227] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0719e158-cc4e-40a5-af8d-437de40a6221 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.430024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cd5b84-0f9a-4dce-9765-89c2ae8be2c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.467430] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92298bf-677c-4a9a-9d30-a8bb883e4969 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.476774] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f50d8b-0be9-41a8-ae11-a178e51bded3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.496757] env[62914]: DEBUG nova.compute.provider_tree [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.599999] env[62914]: DEBUG oslo_vmware.api [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832530, 'name': PowerOnVM_Task, 'duration_secs': 0.519398} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.600345] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1068.600563] env[62914]: INFO nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Took 9.41 seconds to spawn the instance on the hypervisor. [ 1068.600866] env[62914]: DEBUG nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1068.601567] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190894f6-116e-4d97-81bc-5264bed94dc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.669289] env[62914]: DEBUG nova.network.neutron [-] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.879061] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832531, 'name': Destroy_Task, 'duration_secs': 0.830123} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.879406] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Destroyed the VM [ 1068.879676] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1068.879975] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-915f3414-bd50-431f-a6fa-40ccff6e2be8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.887817] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1068.887817] env[62914]: value = "task-4832532" [ 1068.887817] env[62914]: _type = "Task" [ 1068.887817] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.898205] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832532, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.931123] env[62914]: DEBUG nova.network.neutron [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Updating instance_info_cache with network_info: [{"id": "27ba2416-757a-4a8b-a7a5-f3d585ce4899", "address": "fa:16:3e:b9:bc:25", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27ba2416-75", "ovs_interfaceid": "27ba2416-757a-4a8b-a7a5-f3d585ce4899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.000716] env[62914]: DEBUG nova.scheduler.client.report [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1069.121962] env[62914]: INFO nova.compute.manager [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Took 29.14 seconds to build instance. [ 1069.173448] env[62914]: INFO nova.compute.manager [-] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Took 1.41 seconds to deallocate network for instance. 
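[Editor's note] The recurring sequence in these entries (an "Invoking <Something>_Task" request, a "Waiting for the task: (returnval){ value = "task-..." }" block, periodic "progress is N%." lines, then "completed successfully.") is oslo.vmware's task polling, surfaced here through wait_for_task (api.py:397) and _poll_task (api.py:434). The sketch below shows only the control flow of that poll-until-done loop as a point of reference while reading the log; start_task and get_task_info are hypothetical stand-ins, not the actual oslo.vmware API.

```python
# Control-flow sketch of the task polling visible in the log
# ("Waiting for the task ... progress is N% ... completed successfully").
# start_task/get_task_info are hypothetical stand-ins for the vSphere
# calls that oslo.vmware wraps; only the loop structure is illustrated.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(start_task, get_task_info, poll_interval=0.5):
    """Start a vCenter-style task and poll it until it finishes."""
    task_ref = start_task()                # e.g. a PowerOnVM_Task handle
    while True:
        info = get_task_info(task_ref)     # {'state': ..., 'progress': ...}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "task failed"))
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

The production code paths referenced in the log (oslo_vmware/api.py) layer retry handling and interval scheduling on top of this basic loop; the sketch deliberately omits that.
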
[ 1069.227138] env[62914]: DEBUG nova.compute.manager [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Received event network-changed-27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1069.227623] env[62914]: DEBUG nova.compute.manager [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Refreshing instance network info cache due to event network-changed-27ba2416-757a-4a8b-a7a5-f3d585ce4899. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1069.227915] env[62914]: DEBUG oslo_concurrency.lockutils [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] Acquiring lock "refresh_cache-18329e67-719b-4609-83de-7db2c4096781" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.401995] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832532, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.433690] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "refresh_cache-18329e67-719b-4609-83de-7db2c4096781" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.434057] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Instance network_info: |[{"id": "27ba2416-757a-4a8b-a7a5-f3d585ce4899", "address": "fa:16:3e:b9:bc:25", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27ba2416-75", "ovs_interfaceid": "27ba2416-757a-4a8b-a7a5-f3d585ce4899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1069.434379] env[62914]: DEBUG oslo_concurrency.lockutils [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] Acquired 
lock "refresh_cache-18329e67-719b-4609-83de-7db2c4096781" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.434861] env[62914]: DEBUG nova.network.neutron [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Refreshing network info cache for port 27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1069.435917] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:bc:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27ba2416-757a-4a8b-a7a5-f3d585ce4899', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1069.445317] env[62914]: DEBUG oslo.service.loopingcall [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1069.449493] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18329e67-719b-4609-83de-7db2c4096781] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1069.450455] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-255a6736-36dc-4ec0-aece-62fe415e8453 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.481090] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1069.481090] env[62914]: value = "task-4832533" [ 1069.481090] env[62914]: _type = "Task" [ 1069.481090] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.496526] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832533, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.506550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.429s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.509694] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.124s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.511944] env[62914]: INFO nova.compute.claims [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1069.581075] env[62914]: INFO nova.network.neutron [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating port 00706251-f634-4dcb-9705-105152de241f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1069.624736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a1810532-bb3b-4777-931f-cb51aab8e5f9 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.649s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.679018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.727517] env[62914]: DEBUG nova.network.neutron [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Updated VIF entry in instance network info cache for port 27ba2416-757a-4a8b-a7a5-f3d585ce4899. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1069.728056] env[62914]: DEBUG nova.network.neutron [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Updating instance_info_cache with network_info: [{"id": "27ba2416-757a-4a8b-a7a5-f3d585ce4899", "address": "fa:16:3e:b9:bc:25", "network": {"id": "42dfbd96-0a8c-4737-b219-2a891ed87086", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1971436626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14ea39ac6e2d400ca89bbffc20d764ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27ba2416-75", "ovs_interfaceid": "27ba2416-757a-4a8b-a7a5-f3d585ce4899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.899596] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832532, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.996107] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832533, 'name': CreateVM_Task, 'duration_secs': 0.425102} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.996404] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18329e67-719b-4609-83de-7db2c4096781] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1069.997257] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.997404] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.997746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1069.998030] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5db48df-ac2a-41a6-a429-7f72c421db83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.003757] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1070.003757] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52df09e1-d1d0-5b41-48c2-f0d9947b9564" [ 1070.003757] env[62914]: _type = "Task" [ 1070.003757] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.014774] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52df09e1-d1d0-5b41-48c2-f0d9947b9564, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.237127] env[62914]: DEBUG oslo_concurrency.lockutils [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] Releasing lock "refresh_cache-18329e67-719b-4609-83de-7db2c4096781" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.237127] env[62914]: DEBUG nova.compute.manager [req-85b2efbd-ba7e-48ac-838a-0feaa40a6964 req-9e8bfda0-90d2-4f5d-a4f9-268ca28b0fd9 service nova] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Received event network-vif-deleted-5c3a4ef5-fd55-4dee-8a2a-9710098285f4 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1070.405774] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832532, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.519078] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52df09e1-d1d0-5b41-48c2-f0d9947b9564, 'name': SearchDatastore_Task, 'duration_secs': 0.013661} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.519078] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.519078] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1070.519078] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.519078] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.519078] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 
tempest-ServerDiskConfigTestJSON-253234715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1070.519078] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00604a90-ba51-48cb-91a1-45d42e0dca2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.536877] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1070.538814] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1070.538814] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4ebc6a6-d528-45d2-9e71-b2fd65f9818e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.545652] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1070.545652] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ee729-fa31-94cf-1f00-c9f08174edbd" [ 1070.545652] env[62914]: _type = "Task" [ 1070.545652] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.557182] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ee729-fa31-94cf-1f00-c9f08174edbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.873260] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024c1ece-fc34-4de6-bbf0-279d7b44afce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.882871] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cd04e9-a7a3-4939-91a2-50c57963b96d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.900063] env[62914]: DEBUG oslo_vmware.api [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832532, 'name': RemoveSnapshot_Task, 'duration_secs': 1.638072} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.927965] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1070.931621] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3558d372-a69a-4bd6-8294-088882da68e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.940392] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eecb191-03d8-403f-a33c-b2a6cb0c5baa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.955575] env[62914]: DEBUG nova.compute.provider_tree [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.056590] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ee729-fa31-94cf-1f00-c9f08174edbd, 'name': SearchDatastore_Task, 'duration_secs': 0.019758} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.057876] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5253d22-a186-4404-a0e7-5dc10fc2ead4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.063966] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1071.063966] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52251853-094b-ab22-95fb-e7cca16b760e" [ 1071.063966] env[62914]: _type = "Task" [ 1071.063966] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.074349] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52251853-094b-ab22-95fb-e7cca16b760e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.323370] env[62914]: DEBUG nova.compute.manager [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1071.323546] env[62914]: DEBUG nova.compute.manager [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing instance network info cache due to event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1071.323771] env[62914]: DEBUG oslo_concurrency.lockutils [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.323930] env[62914]: DEBUG oslo_concurrency.lockutils [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.324102] env[62914]: DEBUG nova.network.neutron [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1071.434215] env[62914]: WARNING nova.compute.manager [None req-a06da0bf-d054-4faa-944d-59d70a57573c tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Image not found during snapshot: nova.exception.ImageNotFound: Image b0b1438d-527f-4f0d-9f93-01fa019ba07f could not be found. [ 1071.465260] env[62914]: DEBUG nova.scheduler.client.report [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1071.581207] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52251853-094b-ab22-95fb-e7cca16b760e, 'name': SearchDatastore_Task, 'duration_secs': 0.025215} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.581490] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.581750] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 18329e67-719b-4609-83de-7db2c4096781/18329e67-719b-4609-83de-7db2c4096781.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1071.582129] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-955a0602-4ba0-428a-8cc6-d453a6211df9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.590457] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1071.590457] env[62914]: value = "task-4832534" [ 1071.590457] env[62914]: _type = "Task" [ 1071.590457] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.603655] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832534, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.832672] env[62914]: DEBUG nova.compute.manager [req-6d1fb79e-ffa8-457b-8c02-613f146b4e45 req-e2f26572-b80a-4299-8ed7-ead6ec4f4696 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-vif-plugged-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1071.832964] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d1fb79e-ffa8-457b-8c02-613f146b4e45 req-e2f26572-b80a-4299-8ed7-ead6ec4f4696 service nova] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.833252] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d1fb79e-ffa8-457b-8c02-613f146b4e45 req-e2f26572-b80a-4299-8ed7-ead6ec4f4696 service nova] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.833653] env[62914]: DEBUG oslo_concurrency.lockutils [req-6d1fb79e-ffa8-457b-8c02-613f146b4e45 req-e2f26572-b80a-4299-8ed7-ead6ec4f4696 service nova] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.834080] env[62914]: DEBUG nova.compute.manager [req-6d1fb79e-ffa8-457b-8c02-613f146b4e45 req-e2f26572-b80a-4299-8ed7-ead6ec4f4696 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] No waiting events found dispatching network-vif-plugged-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1071.834319] env[62914]: WARNING nova.compute.manager [req-6d1fb79e-ffa8-457b-8c02-613f146b4e45 req-e2f26572-b80a-4299-8ed7-ead6ec4f4696 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received unexpected event network-vif-plugged-00706251-f634-4dcb-9705-105152de241f for instance with vm_state shelved_offloaded and task_state spawning. 
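[Editor's note] The lockutils entries threaded through this section ('Acquiring lock "..." by "..."', 'Lock "..." acquired by "..." :: waited 0.000s', 'Lock "..." "released" by "..." :: held 0.000s') are oslo.concurrency named locks guarding compute_resources, the per-instance "<uuid>-events" queues, and the refresh_cache-<uuid> network caches. A minimal sketch of that pattern follows, assuming the standard oslo.concurrency decorator and context-manager forms; the lock names and guarded work are illustrative, not copied from Nova.

```python
# Sketch of the named-lock pattern behind the
# 'Acquiring lock ... / acquired (waited Ns) / "released" (held Ns)' lines.
# Lock names and the guarded work are illustrative, not taken from Nova.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Runs with the "compute_resources" lock held; the decorator's wrapper
    # logs how long the caller waited for the lock and how long it held it.
    print(f"claiming resources for {instance_uuid}")


def dispatch_instance_event(instance_uuid, event_name):
    # Context-manager form, comparable to the per-instance "<uuid>-events"
    # locks taken when external (Neutron) events are dispatched.
    with lockutils.lock(f"{instance_uuid}-events"):
        print(f"dispatching {event_name}")
```

These are in-process locks; the waited/held durations printed in the log come from the lockutils wrapper itself, which is why even uncontended acquisitions report "waited 0.000s".
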
[ 1071.900721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.900721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.900721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.900721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.900721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.903412] env[62914]: INFO nova.compute.manager [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Terminating instance [ 1071.909391] env[62914]: DEBUG nova.compute.manager [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1071.910023] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1071.911876] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ab5952-044f-4c25-b203-d360613ae8d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.921046] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1071.921890] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0b968cf-10de-460c-a2d5-56437153c279 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.931024] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1071.931024] env[62914]: value = "task-4832535" [ 1071.931024] env[62914]: _type = "Task" [ 1071.931024] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.940109] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832535, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.970888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.971855] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1071.975939] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.110s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.977762] env[62914]: INFO nova.compute.claims [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1072.023155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.023155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.023489] env[62914]: DEBUG nova.network.neutron [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1072.103387] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832534, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.265983] env[62914]: DEBUG nova.network.neutron [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updated VIF entry in instance network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1072.265983] env[62914]: DEBUG nova.network.neutron [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.443726] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832535, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.481019] env[62914]: DEBUG nova.compute.utils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1072.481019] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1072.481019] env[62914]: DEBUG nova.network.neutron [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1072.570177] env[62914]: DEBUG nova.policy [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ea29d6698d4734a5def35fe065fe21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b59bf6daf8c246f7b034dc0adcfc8cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1072.605848] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832534, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631809} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.608892] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 18329e67-719b-4609-83de-7db2c4096781/18329e67-719b-4609-83de-7db2c4096781.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1072.609207] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1072.609972] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7542e49-74bd-4226-972d-9963700c5ead {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.620966] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1072.620966] env[62914]: value = "task-4832536" [ 1072.620966] env[62914]: _type = "Task" [ 1072.620966] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.635699] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832536, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.769988] env[62914]: DEBUG oslo_concurrency.lockutils [req-17622172-cf76-4dea-b559-01422fc92803 req-1797e3d5-e934-46b9-af76-ed595a4b41a0 service nova] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.916826] env[62914]: DEBUG nova.network.neutron [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00706251-f6", "ovs_interfaceid": "00706251-f634-4dcb-9705-105152de241f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.942154] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832535, 'name': PowerOffVM_Task, 'duration_secs': 0.557399} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.942154] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1072.942154] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1072.942981] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2958dccd-d5a8-474e-9c4b-f5035fb5d4fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.982574] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1073.036242] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1073.036485] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1073.036665] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleting the datastore file [datastore1] 68a77363-c25b-426e-86e2-fa31fc6f0ee1 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.036938] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-694df9d6-effc-4cef-8223-d000e18d4c1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.045847] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1073.045847] env[62914]: value = "task-4832538" [ 1073.045847] env[62914]: _type = "Task" [ 1073.045847] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.061402] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.135461] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077325} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.138221] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1073.140556] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0432a2e7-8118-4380-b137-dd852626c37f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.165133] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 18329e67-719b-4609-83de-7db2c4096781/18329e67-719b-4609-83de-7db2c4096781.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.169147] env[62914]: DEBUG nova.network.neutron [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Successfully created port: 371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1073.171191] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7919d67-8cb5-4d63-bff3-26e40d0edd3e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.193144] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1073.193144] env[62914]: value = "task-4832539" [ 1073.193144] env[62914]: _type = "Task" [ 1073.193144] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.202829] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832539, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.353553] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25662090-293d-4860-ba0f-f82feba930d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.361844] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d1f284-d2bb-4a75-a0b3-fe9083a3c847 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.399314] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312d124c-b0b9-4169-8ed4-4c900d2115fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.407626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e073de-6572-4b64-979a-2cc53eee8f45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.422849] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.425589] env[62914]: DEBUG nova.compute.provider_tree [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.460263] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='30ff5b70b78124fec8d191cb1f6b2841',container_format='bare',created_at=2025-11-25T11:30:23Z,direct_url=,disk_format='vmdk',id=04d1e2dc-2bd1-433e-b7d2-80c799be344b,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1994952065-shelved',owner='3ffdaa966ecb4979845fda7778c7fb45',properties=ImageMetaProps,protected=,size=31665152,status='active',tags=,updated_at=2025-11-25T11:30:40Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1073.460617] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1073.460683] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 
tempest-ServersNegativeTestJSON-170677023-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1073.460899] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1073.461063] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1073.461228] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1073.461452] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1073.461609] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1073.461819] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1073.462035] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1073.462721] env[62914]: DEBUG nova.virt.hardware [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1073.463162] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c3951b-ab1c-4b55-a169-68a87dbe756e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.475997] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc101aa-a6a2-412c-8bc0-2561d0bf722e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.497972] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None 
req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:a2:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '456bd8a2-0fb6-4b17-9d25-08e7995c5184', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00706251-f634-4dcb-9705-105152de241f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.512631] env[62914]: DEBUG oslo.service.loopingcall [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.513974] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1073.514347] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fac7d77d-b5a2-45dd-8245-f4ce8ac1d7ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.535878] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1073.535878] env[62914]: value = "task-4832543" [ 1073.535878] env[62914]: _type = "Task" [ 1073.535878] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.545328] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832543, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.556841] env[62914]: DEBUG oslo_vmware.api [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.450434} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.557211] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.557425] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1073.558061] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1073.558061] env[62914]: INFO nova.compute.manager [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1073.558061] env[62914]: DEBUG oslo.service.loopingcall [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.558282] env[62914]: DEBUG nova.compute.manager [-] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1073.558342] env[62914]: DEBUG nova.network.neutron [-] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1073.706220] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832539, 'name': ReconfigVM_Task, 'duration_secs': 0.395895} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.706220] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 18329e67-719b-4609-83de-7db2c4096781/18329e67-719b-4609-83de-7db2c4096781.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.706497] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a71095c7-4861-4f64-a338-5f2ae4775ed2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.714873] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1073.714873] env[62914]: value = "task-4832544" [ 1073.714873] env[62914]: _type = "Task" [ 1073.714873] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.726026] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832544, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.876835] env[62914]: DEBUG nova.compute.manager [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-changed-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1073.877087] env[62914]: DEBUG nova.compute.manager [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Refreshing instance network info cache due to event network-changed-00706251-f634-4dcb-9705-105152de241f. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1073.877332] env[62914]: DEBUG oslo_concurrency.lockutils [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.877596] env[62914]: DEBUG oslo_concurrency.lockutils [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.877682] env[62914]: DEBUG nova.network.neutron [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Refreshing network info cache for port 00706251-f634-4dcb-9705-105152de241f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1073.929456] env[62914]: DEBUG nova.scheduler.client.report [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1074.014819] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1074.049534] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832543, 'name': CreateVM_Task, 'duration_secs': 0.435542} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.052849] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1074.052849] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1074.057214] 
env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1074.057214] env[62914]: DEBUG nova.virt.hardware [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1074.057214] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1074.057214] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba52b041-96da-48fc-8982-4a86cfc5e631 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.060012] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1074.060424] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.061030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1074.061510] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a7a8df8-d9b2-4a5a-9e1a-f3db2a70445f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.068845] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1074.068845] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a24248-f8a1-06e0-8486-9fd6cc879f1d" [ 1074.068845] env[62914]: _type = "Task" [ 1074.068845] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.075248] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41eb796a-b3d4-4d9a-adaa-451a9b626414 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.103017] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.103462] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Processing image 04d1e2dc-2bd1-433e-b7d2-80c799be344b {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.103845] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1074.104164] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.104501] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.105287] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f93bc868-bc85-4c3c-9201-628cb3a31034 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.117082] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.117392] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1074.118463] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b351e3c-f658-4ff7-9c50-d2fe120f668a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.125579] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1074.125579] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d81ea5-cca0-57a1-59ad-b143409bc302" [ 1074.125579] env[62914]: _type = "Task" [ 1074.125579] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.134506] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d81ea5-cca0-57a1-59ad-b143409bc302, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.225758] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832544, 'name': Rename_Task, 'duration_secs': 0.146599} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.225758] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1074.225758] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28926786-e000-4359-8dfb-bb5c3abe9849 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.232394] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1074.232394] env[62914]: value = "task-4832545" [ 1074.232394] env[62914]: _type = "Task" [ 1074.232394] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.241164] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832545, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.415836] env[62914]: DEBUG nova.network.neutron [-] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.436307] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.438678] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1074.440563] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.356s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.442422] env[62914]: INFO nova.compute.claims [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.641335] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1074.641335] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Fetch image to [datastore1] OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40/OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1074.641335] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Downloading stream optimized image 04d1e2dc-2bd1-433e-b7d2-80c799be344b to [datastore1] OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40/OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40.vmdk on the data store datastore1 as vApp {{(pid=62914) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1074.641335] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 
455965de-816d-4ab2-9d5e-a12b06893e6f] Downloading image file data 04d1e2dc-2bd1-433e-b7d2-80c799be344b to the ESX as VM named 'OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40' {{(pid=62914) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1074.647765] env[62914]: DEBUG nova.network.neutron [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updated VIF entry in instance network info cache for port 00706251-f634-4dcb-9705-105152de241f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1074.649345] env[62914]: DEBUG nova.network.neutron [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00706251-f6", "ovs_interfaceid": "00706251-f634-4dcb-9705-105152de241f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.727387] env[62914]: DEBUG nova.compute.manager [req-748676e7-a036-48ec-b1d6-30d349eb7b13 req-7c5d8cb5-11ab-4009-b4c8-2558c654912c service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Received event network-vif-plugged-371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1074.727760] env[62914]: DEBUG oslo_concurrency.lockutils [req-748676e7-a036-48ec-b1d6-30d349eb7b13 req-7c5d8cb5-11ab-4009-b4c8-2558c654912c service nova] Acquiring lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.728175] env[62914]: DEBUG oslo_concurrency.lockutils [req-748676e7-a036-48ec-b1d6-30d349eb7b13 req-7c5d8cb5-11ab-4009-b4c8-2558c654912c service nova] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.728556] env[62914]: DEBUG oslo_concurrency.lockutils [req-748676e7-a036-48ec-b1d6-30d349eb7b13 req-7c5d8cb5-11ab-4009-b4c8-2558c654912c service nova] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.728988] env[62914]: DEBUG nova.compute.manager [req-748676e7-a036-48ec-b1d6-30d349eb7b13 req-7c5d8cb5-11ab-4009-b4c8-2558c654912c service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] No waiting events found dispatching network-vif-plugged-371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1074.729376] env[62914]: WARNING nova.compute.manager [req-748676e7-a036-48ec-b1d6-30d349eb7b13 req-7c5d8cb5-11ab-4009-b4c8-2558c654912c service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Received unexpected event network-vif-plugged-371d22eb-8e46-423e-b4cc-a52d3dbc0879 for instance with vm_state building and task_state spawning. [ 1074.745446] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832545, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.746815] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1074.746815] env[62914]: value = "resgroup-9" [ 1074.746815] env[62914]: _type = "ResourcePool" [ 1074.746815] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1074.746815] env[62914]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-847235b9-290e-4fd8-be33-38c89d39224b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.769723] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lease: (returnval){ [ 1074.769723] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1074.769723] env[62914]: _type = "HttpNfcLease" [ 1074.769723] env[62914]: } obtained for vApp import into resource pool (val){ [ 1074.769723] env[62914]: value = "resgroup-9" [ 1074.769723] env[62914]: _type = "ResourcePool" [ 1074.769723] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1074.770230] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the lease: (returnval){ [ 1074.770230] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1074.770230] env[62914]: _type = "HttpNfcLease" [ 1074.770230] env[62914]: } to be ready. 
{{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1074.778717] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1074.778717] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1074.778717] env[62914]: _type = "HttpNfcLease" [ 1074.778717] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1074.917926] env[62914]: INFO nova.compute.manager [-] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Took 1.36 seconds to deallocate network for instance. [ 1074.934923] env[62914]: DEBUG nova.network.neutron [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Successfully updated port: 371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1074.947951] env[62914]: DEBUG nova.compute.utils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1074.952846] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1074.952929] env[62914]: DEBUG nova.network.neutron [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1075.023276] env[62914]: DEBUG nova.policy [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f5f245cdbbb48f3a6a46981fad0a139', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd77829ac81cd41f2a4acdd571295ca6d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1075.153158] env[62914]: DEBUG oslo_concurrency.lockutils [req-0c50bf36-53cc-45bc-bf96-50dbcae01e51 req-1548b379-5620-48aa-99e4-b21ff0f36d38 service nova] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.243163] env[62914]: DEBUG oslo_vmware.api [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832545, 'name': PowerOnVM_Task, 'duration_secs': 0.518213} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.243490] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1075.243720] env[62914]: INFO nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1075.243922] env[62914]: DEBUG nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1075.244778] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08903ed5-44f8-4add-80f0-4c0271d01b5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.280926] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1075.280926] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1075.280926] env[62914]: _type = "HttpNfcLease" [ 1075.280926] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1075.425347] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.442009] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.442009] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.442009] env[62914]: DEBUG nova.network.neutron [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1075.442009] env[62914]: DEBUG nova.network.neutron [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 
tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Successfully created port: d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1075.452982] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1075.768584] env[62914]: INFO nova.compute.manager [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Took 30.15 seconds to build instance. [ 1075.788827] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1075.788827] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1075.788827] env[62914]: _type = "HttpNfcLease" [ 1075.788827] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1075.813857] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9bcd8d-4f13-44d4-9e81-0bad6fb1fa4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.824154] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc05409-a945-44c3-afc0-704e846fa4e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.863045] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83eb24a8-e2de-45e8-9673-9bdd9578fa01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.872924] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fc61ab-443a-4f20-8ab7-39f7508e6b06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.888799] env[62914]: DEBUG nova.compute.provider_tree [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.985092] env[62914]: DEBUG nova.network.neutron [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.207319] env[62914]: DEBUG nova.compute.manager [req-e96adcc6-1121-409c-8080-f2e4b622c9a2 req-4951d792-da4e-4176-a93b-e7a57207006c service nova] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Received event network-vif-deleted-ac958614-7d81-4f75-b8a8-abc5e626a182 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1076.269224] env[62914]: DEBUG nova.network.neutron [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updating instance_info_cache with network_info: [{"id": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "address": "fa:16:3e:66:39:af", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap371d22eb-8e", "ovs_interfaceid": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.271444] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ae5280c3-7aec-44b1-a171-21f916eb352d tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "18329e67-719b-4609-83de-7db2c4096781" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.662s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.284216] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1076.284216] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1076.284216] env[62914]: _type = "HttpNfcLease" [ 1076.284216] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1076.284921] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1076.284921] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219abde-5018-15c8-5c69-890b2e8b8693" [ 1076.284921] env[62914]: _type = "HttpNfcLease" [ 1076.284921] env[62914]: }. 
{{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1076.285903] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3b3ff0-512c-4494-99a7-a2dca7091561 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.300667] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528ac1d8-0583-6883-3994-02884b34ee9e/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1076.302381] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating HTTP connection to write to file with size = 31665152 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528ac1d8-0583-6883-3994-02884b34ee9e/disk-0.vmdk. {{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1076.386491] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-321a365b-c783-4007-a7b6-1030f6917e34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.393822] env[62914]: DEBUG nova.scheduler.client.report [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1076.467969] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1076.505504] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1076.505765] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1076.505932] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.506216] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1076.506426] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.506644] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1076.506966] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1076.507170] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1076.507396] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1076.507644] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1076.507844] env[62914]: DEBUG nova.virt.hardware [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1076.509035] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92961a2e-ab4e-417b-a111-39d4dbc479b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.518705] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4648e4ec-f486-4dd1-bfca-7297d0ba9b4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.759349] env[62914]: DEBUG nova.compute.manager [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Received event network-changed-371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1076.759349] env[62914]: DEBUG nova.compute.manager [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Refreshing instance network info cache due to event network-changed-371d22eb-8e46-423e-b4cc-a52d3dbc0879. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1076.759927] env[62914]: DEBUG oslo_concurrency.lockutils [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] Acquiring lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.775348] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.775348] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Instance network_info: |[{"id": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "address": "fa:16:3e:66:39:af", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap371d22eb-8e", "ovs_interfaceid": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1076.775348] env[62914]: DEBUG oslo_concurrency.lockutils [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] Acquired lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.775348] env[62914]: DEBUG nova.network.neutron [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Refreshing network info cache for port 371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1076.775348] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:39:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'371d22eb-8e46-423e-b4cc-a52d3dbc0879', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.790676] env[62914]: DEBUG oslo.service.loopingcall [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.795510] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.798017] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af1e1119-afb3-4f24-b0d2-8834d2dabf88 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.823038] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.823038] env[62914]: value = "task-4832548" [ 1076.823038] env[62914]: _type = "Task" [ 1076.823038] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.834950] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832548, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.901904] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.903237] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1076.906744] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.828s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.909015] env[62914]: INFO nova.compute.claims [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.197630] env[62914]: DEBUG nova.network.neutron [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updated VIF entry in instance network info cache for port 371d22eb-8e46-423e-b4cc-a52d3dbc0879. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1077.198019] env[62914]: DEBUG nova.network.neutron [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updating instance_info_cache with network_info: [{"id": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "address": "fa:16:3e:66:39:af", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap371d22eb-8e", "ovs_interfaceid": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.337408] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832548, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.413931] env[62914]: DEBUG nova.compute.utils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1077.419026] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1077.419026] env[62914]: DEBUG nova.network.neutron [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1077.467146] env[62914]: DEBUG nova.policy [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddc9958565c745e488dc7f3b34af9585', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4860bec4a28e4289b7a508f007fff452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1077.469996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "18329e67-719b-4609-83de-7db2c4096781" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.470347] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "18329e67-719b-4609-83de-7db2c4096781" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.470506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "18329e67-719b-4609-83de-7db2c4096781-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.470709] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "18329e67-719b-4609-83de-7db2c4096781-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.470885] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "18329e67-719b-4609-83de-7db2c4096781-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.474460] env[62914]: INFO 
nova.compute.manager [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Terminating instance [ 1077.476929] env[62914]: DEBUG nova.compute.manager [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1077.476929] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1077.477952] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d507a590-8541-4d97-b362-8dbae111725a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.486795] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1077.486844] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57fcf1d1-952a-4c46-b09c-efbbd17ab38f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.499096] env[62914]: DEBUG oslo_vmware.api [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1077.499096] env[62914]: value = "task-4832549" [ 1077.499096] env[62914]: _type = "Task" [ 1077.499096] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.509819] env[62914]: DEBUG oslo_vmware.api [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832549, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.701310] env[62914]: DEBUG oslo_concurrency.lockutils [req-8d796e93-8023-4140-9d2a-8e82fcdebb10 req-0d664580-3c44-4380-bcef-1c0cb5713bfd service nova] Releasing lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.808657] env[62914]: DEBUG nova.network.neutron [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Successfully updated port: d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1077.816074] env[62914]: DEBUG nova.network.neutron [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Successfully created port: e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.843133] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832548, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.918890] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1078.013720] env[62914]: DEBUG oslo_vmware.api [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832549, 'name': PowerOffVM_Task, 'duration_secs': 0.244978} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.014087] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1078.014295] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1078.014628] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47300d12-3847-4a58-bb7a-4b9aa414cb85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.089577] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1078.089693] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1078.089849] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleting the datastore file [datastore2] 18329e67-719b-4609-83de-7db2c4096781 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.093417] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc410b98-f4b6-406b-9035-42dcad4b3216 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.104683] env[62914]: DEBUG oslo_vmware.api [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for the task: (returnval){ [ 1078.104683] env[62914]: value = "task-4832551" [ 1078.104683] env[62914]: _type = "Task" [ 1078.104683] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.123129] env[62914]: DEBUG oslo_vmware.api [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832551, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.293246] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c8499c-2fdd-4035-ad14-e57c9d6900cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.306325] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b66795f-502c-4b0f-b65d-90c756b11879 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.313523] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.313523] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.313523] env[62914]: DEBUG nova.network.neutron [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1078.364687] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b3dd2d-df31-44f2-84cb-df0b215d34b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.374103] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832548, 'name': CreateVM_Task, 'duration_secs': 1.298109} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.377130] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1078.380065] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.380273] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.380685] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1078.382080] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ece808a-90ec-49e3-b84b-b5cb82dabddb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.387973] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c28c5f4-2695-4860-b743-d77d44938788 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.410471] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1078.410471] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a60ede-a799-2d65-8f74-c906efb91df0" [ 1078.410471] env[62914]: _type = "Task" [ 1078.410471] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.411393] env[62914]: DEBUG nova.compute.provider_tree [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.426043] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52a60ede-a799-2d65-8f74-c906efb91df0, 'name': SearchDatastore_Task, 'duration_secs': 0.022224} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.430279] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.430678] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.431013] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.431237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.431505] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.435048] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33e59384-8959-4c86-bf9e-3595d0190e95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.449489] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.450237] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1078.453579] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc0801c8-5aa6-4f3f-8dea-c91ba88fa337 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.459461] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1078.459461] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f350c8-d1be-94d3-edc2-085d53a0ac64" [ 1078.459461] env[62914]: _type = "Task" [ 1078.459461] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.470217] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f350c8-d1be-94d3-edc2-085d53a0ac64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.619065] env[62914]: DEBUG oslo_vmware.api [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Task: {'id': task-4832551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199387} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.619065] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.619065] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1078.619065] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1078.619065] env[62914]: INFO nova.compute.manager [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] [instance: 18329e67-719b-4609-83de-7db2c4096781] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1078.619065] env[62914]: DEBUG oslo.service.loopingcall [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.619065] env[62914]: DEBUG nova.compute.manager [-] [instance: 18329e67-719b-4609-83de-7db2c4096781] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1078.619065] env[62914]: DEBUG nova.network.neutron [-] [instance: 18329e67-719b-4609-83de-7db2c4096781] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1078.895288] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Completed reading data from the image iterator. {{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1078.895288] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528ac1d8-0583-6883-3994-02884b34ee9e/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1078.895678] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed35a01-41f0-4055-8185-4da7a819743d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.900366] env[62914]: DEBUG nova.network.neutron [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1078.907887] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528ac1d8-0583-6883-3994-02884b34ee9e/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1078.908428] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528ac1d8-0583-6883-3994-02884b34ee9e/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1078.909100] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-94bf8789-e0a7-43c3-93be-b68f2bc2da50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.920045] env[62914]: DEBUG nova.scheduler.client.report [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1078.946208] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1078.975754] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f350c8-d1be-94d3-edc2-085d53a0ac64, 'name': SearchDatastore_Task, 'duration_secs': 0.024764} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.976877] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26a72f33-00b3-403d-a7cf-fd669cf8b612 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.986023] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1078.986023] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291b742-961d-a066-b7fa-c3decbb44050" [ 1078.986023] env[62914]: _type = "Task" [ 1078.986023] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.994255] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.994255] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.009225] env[62914]: DEBUG nova.compute.manager [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received event network-vif-plugged-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1079.009225] env[62914]: DEBUG oslo_concurrency.lockutils [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.009225] env[62914]: DEBUG oslo_concurrency.lockutils [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.009225] env[62914]: DEBUG oslo_concurrency.lockutils [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.009225] env[62914]: DEBUG nova.compute.manager [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] No waiting events found dispatching network-vif-plugged-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1079.009225] env[62914]: WARNING nova.compute.manager [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received unexpected event network-vif-plugged-d3377942-1cea-43ef-8a80-ebe5519d491c for instance with vm_state building and task_state spawning. 
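The repeated 'Acquiring lock ... by "..."', 'acquired ... :: waited N.NNNs' and '"released" ... :: held N.NNNs' DEBUG lines above (the per-instance "-events" lock, the "compute_resources" lock, the "refresh_cache-<uuid>" locks) are all emitted by oslo.concurrency's lockutils helpers, which time how long a caller waited for and then held a named lock. A minimal sketch of that pattern, assuming only the public lockutils API; the function and lock names below are illustrative and are not Nova's actual code:

    from oslo_concurrency import lockutils

    # Plain context-manager form: produces the "Acquiring lock ..." /
    # "Acquired lock ..." / "Releasing lock ..." DEBUG lines seen for the
    # refresh_cache-<uuid> locks above (lockutils.lock).
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return None  # rebuild the cached network info here

    # Decorator form: the wrapping "inner" function adds the timing, i.e.
    # the 'acquired by "<caller>" :: waited N.NNNs' and
    # '"released" by "<caller>" :: held N.NNNs' lines above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass

lockutils.lock() also accepts external=True for a cross-process, file-backed lock, which is presumably what the 'Acquired external semaphore "[datastore1] devstack-image-cache_base/..."' line for the image cache corresponds to.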
[ 1079.009846] env[62914]: DEBUG nova.compute.manager [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1079.010148] env[62914]: DEBUG nova.compute.manager [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing instance network info cache due to event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1079.010454] env[62914]: DEBUG oslo_concurrency.lockutils [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.013061] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1079.015052] env[62914]: DEBUG nova.virt.hardware [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1079.019784] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f585ee9a-f696-4290-ab70-c9b78c295eb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.025758] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5291b742-961d-a066-b7fa-c3decbb44050, 'name': SearchDatastore_Task, 'duration_secs': 0.021825} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.026332] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.026887] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 67ecc3a1-03b0-4881-b5c4-9c4fa244b292/67ecc3a1-03b0-4881-b5c4-9c4fa244b292.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1079.027591] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-110bfccd-005f-4687-8343-6046ae845022 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.033766] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e21f0a-3cf8-4ef5-aa5f-e496a57960e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.040065] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1079.040065] env[62914]: value = "task-4832553" [ 1079.040065] env[62914]: _type = "Task" [ 1079.040065] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.060383] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832553, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.239891] env[62914]: DEBUG oslo_vmware.rw_handles [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528ac1d8-0583-6883-3994-02884b34ee9e/disk-0.vmdk. 
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1079.240121] env[62914]: INFO nova.virt.vmwareapi.images [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Downloaded image file data 04d1e2dc-2bd1-433e-b7d2-80c799be344b [ 1079.242299] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c579423-3d3f-483b-86b4-62cd12c96692 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.259936] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b6da86a-569d-4ef4-ac57-5d24db49c3c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.304224] env[62914]: DEBUG nova.network.neutron [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.398913] env[62914]: INFO nova.virt.vmwareapi.images [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] The imported VM was unregistered [ 1079.402159] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1079.402603] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Creating directory with path [datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b {{(pid=62914) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.402975] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd4ac72f-20c4-49cc-be97-056e73460902 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.425178] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.426019] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1079.428788] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.108s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.428788] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.428970] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1079.429274] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.751s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.429501] env[62914]: DEBUG nova.objects.instance [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid da2af7d4-f311-444a-aa9f-0744e698defb {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.438044] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898fe18c-3a2f-4f85-b4a0-386699dabc50 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.443155] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Created directory with path [datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b {{(pid=62914) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.443380] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40/OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40.vmdk to [datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk. {{(pid=62914) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1079.445265] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1efc5604-ab0f-4824-94ec-1a0b57373d21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.454449] env[62914]: DEBUG nova.compute.manager [req-b934e7f4-f073-43ce-a50d-29863d75fe1e req-372a7743-d11d-4b3a-a963-e26b94810f86 service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Received event network-vif-plugged-e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1079.454695] env[62914]: DEBUG oslo_concurrency.lockutils [req-b934e7f4-f073-43ce-a50d-29863d75fe1e req-372a7743-d11d-4b3a-a963-e26b94810f86 service nova] Acquiring lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.454994] env[62914]: DEBUG oslo_concurrency.lockutils [req-b934e7f4-f073-43ce-a50d-29863d75fe1e req-372a7743-d11d-4b3a-a963-e26b94810f86 service nova] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.455132] env[62914]: DEBUG oslo_concurrency.lockutils [req-b934e7f4-f073-43ce-a50d-29863d75fe1e req-372a7743-d11d-4b3a-a963-e26b94810f86 service nova] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.455311] env[62914]: DEBUG nova.compute.manager [req-b934e7f4-f073-43ce-a50d-29863d75fe1e req-372a7743-d11d-4b3a-a963-e26b94810f86 service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] No waiting events found dispatching network-vif-plugged-e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1079.455492] env[62914]: WARNING nova.compute.manager [req-b934e7f4-f073-43ce-a50d-29863d75fe1e req-372a7743-d11d-4b3a-a963-e26b94810f86 service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Received unexpected event network-vif-plugged-e159d0dc-e550-43fb-aa88-9a9b59bdc767 for instance with vm_state building and task_state spawning. 
[ 1079.465342] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8862918-cf22-4ca1-baca-2b3d77c89a98 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.470393] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1079.470393] env[62914]: value = "task-4832555" [ 1079.470393] env[62914]: _type = "Task" [ 1079.470393] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.489382] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c573226e-8e61-486d-b9e9-9c3ffba3a3c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.492787] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.499448] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb610a4c-fc88-45a6-a090-4b46c3171bab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.508299] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1079.541031] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179190MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1079.541289] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.553950] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832553, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.556406] env[62914]: DEBUG nova.network.neutron [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Successfully updated port: e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.807718] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.808337] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Instance network_info: |[{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1079.808527] env[62914]: DEBUG oslo_concurrency.lockutils [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.808624] env[62914]: DEBUG nova.network.neutron [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1079.810151] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:e3:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3377942-1cea-43ef-8a80-ebe5519d491c', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1079.819628] env[62914]: DEBUG oslo.service.loopingcall [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1079.824382] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1079.825290] env[62914]: DEBUG nova.network.neutron [-] [instance: 18329e67-719b-4609-83de-7db2c4096781] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.827639] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fd76685-206c-4518-b4a8-d1062b601a28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.858594] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1079.858594] env[62914]: value = "task-4832556" [ 1079.858594] env[62914]: _type = "Task" [ 1079.858594] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.871669] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832556, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.937961] env[62914]: DEBUG nova.compute.utils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1079.946217] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1079.946440] env[62914]: DEBUG nova.network.neutron [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1079.987214] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.045036] env[62914]: DEBUG nova.policy [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a8cfcd0aed9499a83c09052328647cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '894c73ea90624428afeb1165afbbfa9c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1080.050211] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.063074] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.063286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.063519] env[62914]: DEBUG nova.network.neutron [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1080.064964] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832553, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.162831] env[62914]: DEBUG nova.network.neutron [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updated VIF entry in instance network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1080.163372] env[62914]: DEBUG nova.network.neutron [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.325893] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6f7e4e-2375-41eb-9a8e-515e5e81964c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.330743] env[62914]: INFO nova.compute.manager [-] [instance: 18329e67-719b-4609-83de-7db2c4096781] Took 1.71 seconds to deallocate network for instance. [ 1080.341383] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cbd21b-553f-4f18-8ea5-e6e15daa76f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.387748] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b84fbb-be01-45b6-b30d-462dbc9ee773 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.401169] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfb3a49-0650-4ff9-9533-25bfa33f4c9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.406297] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832556, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.420068] env[62914]: DEBUG nova.compute.provider_tree [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.452823] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1080.493942] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.565696] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832553, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.133401} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.568195] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 67ecc3a1-03b0-4881-b5c4-9c4fa244b292/67ecc3a1-03b0-4881-b5c4-9c4fa244b292.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1080.568606] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.568964] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d67e84f-a237-489b-9335-568c3b4850db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.580032] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1080.580032] env[62914]: value = "task-4832558" [ 1080.580032] env[62914]: _type = "Task" [ 1080.580032] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.594270] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832558, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.617824] env[62914]: DEBUG nova.network.neutron [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1080.660249] env[62914]: DEBUG nova.network.neutron [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Successfully created port: 949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.666777] env[62914]: DEBUG oslo_concurrency.lockutils [req-56a462e9-5278-4590-9c95-053b7b68583c req-54be821a-815c-41ec-b3fd-71606b20f3c9 service nova] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.836390] env[62914]: DEBUG nova.network.neutron [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Updating instance_info_cache with network_info: [{"id": "e159d0dc-e550-43fb-aa88-9a9b59bdc767", "address": "fa:16:3e:40:fa:59", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape159d0dc-e5", "ovs_interfaceid": "e159d0dc-e550-43fb-aa88-9a9b59bdc767", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.842114] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.896995] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832556, 'name': CreateVM_Task, 'duration_secs': 0.833647} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.897508] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1080.898138] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.898389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.898802] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1080.899175] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afb1daca-b0ac-4148-843e-7011eaf3bd6c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.906242] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1080.906242] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5266157f-72a8-6972-54a1-0e65150f8931" [ 1080.906242] env[62914]: _type = "Task" [ 1080.906242] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.917559] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5266157f-72a8-6972-54a1-0e65150f8931, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.923311] env[62914]: DEBUG nova.scheduler.client.report [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1080.983993] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.046716] env[62914]: DEBUG nova.compute.manager [req-2c07bce8-e7f9-4cb8-b9a5-1cc80b67d13f req-f0952273-eff9-4aab-98ed-037395f22187 service nova] [instance: 18329e67-719b-4609-83de-7db2c4096781] Received event network-vif-deleted-27ba2416-757a-4a8b-a7a5-f3d585ce4899 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1081.093134] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138533} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.093493] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1081.094737] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ccf55d-f42b-4e80-84cb-c61d9c39bce5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.120655] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 67ecc3a1-03b0-4881-b5c4-9c4fa244b292/67ecc3a1-03b0-4881-b5c4-9c4fa244b292.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.121240] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8e0a7b2-cc88-4740-a494-0a5d91e8185f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.146244] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1081.146244] env[62914]: value = "task-4832559" [ 1081.146244] env[62914]: _type = "Task" [ 1081.146244] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.157147] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832559, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.339392] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.339849] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Instance network_info: |[{"id": "e159d0dc-e550-43fb-aa88-9a9b59bdc767", "address": "fa:16:3e:40:fa:59", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape159d0dc-e5", "ovs_interfaceid": "e159d0dc-e550-43fb-aa88-9a9b59bdc767", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1081.340371] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:fa:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e159d0dc-e550-43fb-aa88-9a9b59bdc767', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.349808] env[62914]: DEBUG oslo.service.loopingcall [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.350201] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1081.350540] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d56e3e27-7c11-44de-8f61-de21d7af4099 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.378279] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.378279] env[62914]: value = "task-4832560" [ 1081.378279] env[62914]: _type = "Task" [ 1081.378279] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.388852] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832560, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.425337] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5266157f-72a8-6972-54a1-0e65150f8931, 'name': SearchDatastore_Task, 'duration_secs': 0.088286} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.425337] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.425337] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.425337] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.425337] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.425337] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 
tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.425991] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-100e8217-4d70-418c-a44f-5729b8e0cfda {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.429000] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.432058] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.007s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.432607] env[62914]: DEBUG nova.objects.instance [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lazy-loading 'resources' on Instance uuid 68a77363-c25b-426e-86e2-fa31fc6f0ee1 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.452021] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.452021] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1081.453208] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34878de9-ded7-498a-8f98-6e2da0fea50d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.464760] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1081.468660] env[62914]: INFO nova.scheduler.client.report [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance da2af7d4-f311-444a-aa9f-0744e698defb [ 1081.473761] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1081.473761] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d9ed91-14b0-6ac7-abb9-842661c3d16d" [ 1081.473761] env[62914]: _type = "Task" [ 1081.473761] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.499242] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.505243] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d9ed91-14b0-6ac7-abb9-842661c3d16d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.522080] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1081.522080] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1081.522080] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.522080] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1081.522080] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.522733] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1081.522733] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1081.522733] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1081.522911] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1081.523233] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1081.523451] env[62914]: DEBUG nova.virt.hardware [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.524804] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9c3950-73d3-43c2-baf3-a7ec79b1e3e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.537053] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d67922b-254c-43a6-8dee-4bf18973519e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.607289] env[62914]: DEBUG nova.compute.manager [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Received event network-changed-e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11207}} [ 1081.607289] env[62914]: DEBUG nova.compute.manager [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Refreshing instance network info cache due to event network-changed-e159d0dc-e550-43fb-aa88-9a9b59bdc767. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1081.607416] env[62914]: DEBUG oslo_concurrency.lockutils [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] Acquiring lock "refresh_cache-6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.607566] env[62914]: DEBUG oslo_concurrency.lockutils [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] Acquired lock "refresh_cache-6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.607735] env[62914]: DEBUG nova.network.neutron [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Refreshing network info cache for port e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1081.662231] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832559, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.663575] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.663816] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.895521] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832560, 'name': CreateVM_Task, 'duration_secs': 0.389205} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.895721] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1081.896621] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.896838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.897203] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1081.897512] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ae2ee47-dfbf-4fd6-9e57-a59c187fe23b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.907312] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1081.907312] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e4f6c4-5eba-4e33-80d8-cbdd8e60ab0d" [ 1081.907312] env[62914]: _type = "Task" [ 1081.907312] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.920804] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e4f6c4-5eba-4e33-80d8-cbdd8e60ab0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.995719] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e8153c14-2e38-4273-8b6f-0d6898123eb3 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "da2af7d4-f311-444a-aa9f-0744e698defb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.400s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.004206] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d9ed91-14b0-6ac7-abb9-842661c3d16d, 'name': SearchDatastore_Task, 'duration_secs': 0.093996} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.008299] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.011783] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a3fc34-05e8-4e60-ac4b-281ff7ef907f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.022069] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1082.022069] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eb313d-6150-c5c2-0a68-aa2ebdd07cc5" [ 1082.022069] env[62914]: _type = "Task" [ 1082.022069] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.038762] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eb313d-6150-c5c2-0a68-aa2ebdd07cc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.167539] env[62914]: DEBUG nova.compute.utils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1082.169864] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832559, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.231791] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf80523-4747-464f-9d8f-9a23f1c8b7d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.247340] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18a960c-93c5-4c49-97f4-8f3e00f5f37b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.286040] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5cf43c-6dca-406a-9acc-a2f0bc1d00cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.298382] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff4f994-7370-4e13-b1fc-274c6211e2fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.317136] env[62914]: DEBUG nova.compute.provider_tree [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.377045] env[62914]: DEBUG nova.network.neutron [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Updated VIF entry in instance network info cache for port e159d0dc-e550-43fb-aa88-9a9b59bdc767. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1082.377553] env[62914]: DEBUG nova.network.neutron [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Updating instance_info_cache with network_info: [{"id": "e159d0dc-e550-43fb-aa88-9a9b59bdc767", "address": "fa:16:3e:40:fa:59", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape159d0dc-e5", "ovs_interfaceid": "e159d0dc-e550-43fb-aa88-9a9b59bdc767", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.389447] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.389447] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.389447] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.389737] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.389806] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 
tempest-ServersTestJSON-1645965215-project-member] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.392176] env[62914]: INFO nova.compute.manager [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Terminating instance [ 1082.398239] env[62914]: DEBUG nova.compute.manager [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1082.398734] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1082.399356] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c01393-e576-4267-b89a-b3eaa48d9583 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.408473] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1082.411776] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3bea8d2-1aa3-4af7-8edf-04bcb915c65b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.419995] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e4f6c4-5eba-4e33-80d8-cbdd8e60ab0d, 'name': SearchDatastore_Task, 'duration_secs': 0.047843} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.421526] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.421796] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.422089] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.422248] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.422465] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.422815] env[62914]: DEBUG oslo_vmware.api [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1082.422815] env[62914]: value = "task-4832561" [ 1082.422815] env[62914]: _type = "Task" [ 1082.422815] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.423023] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f895d79d-08e3-4073-bf76-f2bc9ad2500e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.435341] env[62914]: DEBUG oslo_vmware.api [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832561, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.442098] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.442098] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1082.442098] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6620f0bf-39f0-464a-aa63-bfa145196aab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.448615] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1082.448615] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5227bb41-4fc8-a508-b72b-343de901f6fd" [ 1082.448615] env[62914]: _type = "Task" [ 1082.448615] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.457722] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5227bb41-4fc8-a508-b72b-343de901f6fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.466238] env[62914]: DEBUG nova.network.neutron [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Successfully updated port: 949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.495216] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.536207] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52eb313d-6150-c5c2-0a68-aa2ebdd07cc5, 'name': SearchDatastore_Task, 'duration_secs': 0.087236} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.536858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.537178] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1082.537592] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bbef599-aa3b-4710-875b-73b27f120ab4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.547664] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1082.547664] env[62914]: value = "task-4832562" [ 1082.547664] env[62914]: _type = "Task" [ 1082.547664] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.559763] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832562, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.660977] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832559, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.672051] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.820725] env[62914]: DEBUG nova.scheduler.client.report [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1082.880670] env[62914]: DEBUG oslo_concurrency.lockutils [req-775101e5-3fb8-4dd6-82a2-3eaef803fa02 req-2806fe72-958a-4f36-8b90-2b484038644b service nova] Releasing lock "refresh_cache-6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.937101] env[62914]: DEBUG oslo_vmware.api [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832561, 'name': PowerOffVM_Task, 'duration_secs': 0.266394} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.937426] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1082.937634] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1082.937946] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b578858e-0520-4d55-8f7b-f7629c517b76 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.960162] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5227bb41-4fc8-a508-b72b-343de901f6fd, 'name': SearchDatastore_Task, 'duration_secs': 0.049156} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.961118] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a66cae2-f2dd-4b55-99a1-d5a0ea2eef0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.968027] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1082.968027] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52927abb-1dc0-ff22-eb5a-82cdd0e86efb" [ 1082.968027] env[62914]: _type = "Task" [ 1082.968027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.968609] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.968757] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.969194] env[62914]: DEBUG nova.network.neutron [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1082.982550] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52927abb-1dc0-ff22-eb5a-82cdd0e86efb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.997622] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832555, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.492041} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.998064] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40/OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40.vmdk to [datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk. 
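The MoveVirtualDisk_Task completion above, like the SearchDatastore_Task, CopyVirtualDisk_Task and PowerOffVM_Task entries elsewhere in this log, reflects one oslo.vmware calling pattern: the driver invokes a vCenter method through the API session, and any method whose name ends in _Task returns a task reference that wait_for_task() polls until it finishes, which is what produces the "progress is N%" lines. A minimal sketch of that pattern follows; the host, credentials, and object lookups are placeholders for illustration, not values taken from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details -- not values from this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',   # vCenter host
        'user', 'secret',    # credentials
        10,                  # api_retry_count
        0.5)                 # task_poll_interval: seconds between progress polls

    # Read-only calls go through invoke_api(); this is the PropertyCollector
    # traffic that appears above as RetrievePropertiesEx invocations.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'VirtualMachine', 1)
    vm_ref = result.objects[0].obj  # first VM managed-object reference

    # Methods ending in "_Task" (PowerOffVM_Task, CopyVirtualDisk_Task,
    # MoveVirtualDisk_Task, ...) return a task reference; wait_for_task()
    # polls it until completion, emitting the progress lines seen in this log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

In oslo.vmware, wait_for_task() returns the completed task info on success and raises on failure, which is why the driver logs a task as "completed successfully" only after the poll loop ends.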
[ 1082.998321] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Cleaning up location [datastore1] OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1082.998627] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_ac6f7122-3414-4c14-a6f4-33929f0e4f40 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.998867] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24a3d6c0-f853-4e4a-a816-8eb1b04ffd00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.008023] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1083.008023] env[62914]: value = "task-4832565" [ 1083.008023] env[62914]: _type = "Task" [ 1083.008023] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.020806] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832565, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.022434] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1083.022722] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1083.022934] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore2] 417d4287-0f76-4d2e-b1da-43455d7ff3e6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.023192] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c577b04-346e-4a3f-ae50-99a0e86735df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.030977] env[62914]: DEBUG oslo_vmware.api [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1083.030977] env[62914]: value = "task-4832566" [ 1083.030977] env[62914]: _type = "Task" [ 1083.030977] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.040679] env[62914]: DEBUG oslo_vmware.api [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832566, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.058717] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832562, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.108397] env[62914]: DEBUG nova.compute.manager [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-vif-plugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1083.108689] env[62914]: DEBUG oslo_concurrency.lockutils [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.108920] env[62914]: DEBUG oslo_concurrency.lockutils [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.109120] env[62914]: DEBUG oslo_concurrency.lockutils [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.109304] env[62914]: DEBUG nova.compute.manager [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] No waiting events found dispatching network-vif-plugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1083.109479] env[62914]: WARNING nova.compute.manager [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received unexpected event network-vif-plugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 for instance with vm_state building and task_state spawning. [ 1083.109644] env[62914]: DEBUG nova.compute.manager [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1083.109806] env[62914]: DEBUG nova.compute.manager [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing instance network info cache due to event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1083.109977] env[62914]: DEBUG oslo_concurrency.lockutils [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.161694] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832559, 'name': ReconfigVM_Task, 'duration_secs': 1.94403} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.162043] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 67ecc3a1-03b0-4881-b5c4-9c4fa244b292/67ecc3a1-03b0-4881-b5c4-9c4fa244b292.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1083.162911] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35edcf91-f083-4899-8d9b-bc7d2a4c7d58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.170837] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1083.170837] env[62914]: value = "task-4832567" [ 1083.170837] env[62914]: _type = "Task" [ 1083.170837] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.182029] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832567, 'name': Rename_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.326283] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.329043] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.788s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.365569] env[62914]: INFO nova.scheduler.client.report [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleted allocations for instance 68a77363-c25b-426e-86e2-fa31fc6f0ee1 [ 1083.483167] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52927abb-1dc0-ff22-eb5a-82cdd0e86efb, 'name': SearchDatastore_Task, 'duration_secs': 0.02046} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.483490] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.483763] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce/6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1083.484052] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ce3cba3-35d5-408a-b703-3ff69a3a3fdb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.491618] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1083.491618] env[62914]: value = "task-4832568" [ 1083.491618] env[62914]: _type = "Task" [ 1083.491618] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.503395] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.512499] env[62914]: DEBUG nova.network.neutron [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1083.520961] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071347} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.521260] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.521824] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.521824] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk to [datastore1] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1083.522026] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf0fe2c6-d14e-496e-8837-630e924a7f39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.529268] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1083.529268] env[62914]: value = "task-4832569" [ 1083.529268] env[62914]: _type = "Task" [ 1083.529268] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.546798] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.550139] env[62914]: DEBUG oslo_vmware.api [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.449084} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.555397] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.555721] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1083.555954] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1083.556194] env[62914]: INFO nova.compute.manager [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1083.556495] env[62914]: DEBUG oslo.service.loopingcall [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.556728] env[62914]: DEBUG nova.compute.manager [-] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1083.556833] env[62914]: DEBUG nova.network.neutron [-] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1083.566710] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832562, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.910549} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.566882] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1083.567650] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.567650] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5357481e-becb-4718-8fef-e2eb5fe78794 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.578188] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1083.578188] env[62914]: value = "task-4832570" [ 1083.578188] env[62914]: _type = "Task" [ 1083.578188] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.590553] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.682379] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832567, 'name': Rename_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.756897] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.757266] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.757921] env[62914]: INFO nova.compute.manager [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Attaching volume 73ef6951-5fab-40ea-bbd6-8971648c87c6 to /dev/sdb [ 1083.798879] env[62914]: DEBUG nova.network.neutron [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.820648] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d00c3a-c789-4745-9fe0-1305f2afbdc2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.830482] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12390ef4-a5c7-4671-a601-905e2f06f0ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.860528] env[62914]: DEBUG nova.virt.block_device [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 
tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updating existing volume attachment record: 861f9d94-f33e-4c6b-a42d-01de1cf132fd {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1083.881291] env[62914]: DEBUG oslo_concurrency.lockutils [None req-814a8f80-99f9-4d6d-8b21-8bb77b5ccacc tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "68a77363-c25b-426e-86e2-fa31fc6f0ee1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.983s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.004768] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.040265] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.089478] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089173} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.089803] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1084.094021] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835d3b42-2243-486b-8948-5319419c590d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.096384] env[62914]: DEBUG nova.compute.manager [req-eee6d0da-14e0-4d87-9110-ba6bffd8a8d9 req-c7e567f7-ef0c-431b-add3-5b6b73edb859 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Received event network-vif-deleted-87adb6e9-f00f-4164-8903-0a82d32416ca {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1084.096589] env[62914]: INFO nova.compute.manager [req-eee6d0da-14e0-4d87-9110-ba6bffd8a8d9 req-c7e567f7-ef0c-431b-add3-5b6b73edb859 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Neutron deleted interface 87adb6e9-f00f-4164-8903-0a82d32416ca; detaching it from the instance and deleting it from the info cache [ 1084.096766] env[62914]: DEBUG nova.network.neutron [req-eee6d0da-14e0-4d87-9110-ba6bffd8a8d9 req-c7e567f7-ef0c-431b-add3-5b6b73edb859 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.119256] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.120763] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2befdc03-c70e-46f6-b37b-c5fc2b4de977 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.149482] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1084.149482] env[62914]: value = "task-4832573" [ 1084.149482] env[62914]: _type = "Task" [ 1084.149482] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.152604] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832573, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.184847] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832567, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.302334] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.302764] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Instance network_info: |[{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1084.303111] env[62914]: DEBUG oslo_concurrency.lockutils [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.303307] env[62914]: DEBUG nova.network.neutron [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1084.304547] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:83:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '949a1716-cbb0-44a7-a0f6-4d27a45071e0', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1084.313340] env[62914]: DEBUG oslo.service.loopingcall [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.314457] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1084.314697] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be338133-4e81-42bf-b560-ed1140cb5102 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.336636] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1084.336636] env[62914]: value = "task-4832575" [ 1084.336636] env[62914]: _type = "Task" [ 1084.336636] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.349191] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832575, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.363622] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e061304c-998b-4331-b60d-809916844a6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.363804] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.363910] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance af141439-1c36-4184-9775-d1e30ee77ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364043] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 06e8b438-01ef-481f-8e27-2faa01bb97aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364195] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364308] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364428] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 417d4287-0f76-4d2e-b1da-43455d7ff3e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364513] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 3b26b5d7-524a-41af-ab75-a158568e031e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364630] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance b285198b-aa95-4dcb-99b3-531d09c210d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364753] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e730b472-fca8-4041-a00c-91bee25232f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.364901] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 455965de-816d-4ab2-9d5e-a12b06893e6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.365180] env[62914]: WARNING nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 18329e67-719b-4609-83de-7db2c4096781 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1084.365299] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 67ecc3a1-03b0-4881-b5c4-9c4fa244b292 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.365393] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.365490] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.365599] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 88acf376-122d-4796-8400-dfc4c7ec45d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1084.403424] env[62914]: DEBUG nova.network.neutron [-] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.502320] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.541039] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.599788] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-825861b3-09a2-4146-a892-908f6b2ba27b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.610326] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e8d148-7cdb-4193-8a40-6957dc539134 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.654957] env[62914]: DEBUG nova.compute.manager [req-eee6d0da-14e0-4d87-9110-ba6bffd8a8d9 req-c7e567f7-ef0c-431b-add3-5b6b73edb859 service nova] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Detach interface failed, port_id=87adb6e9-f00f-4164-8903-0a82d32416ca, reason: Instance 417d4287-0f76-4d2e-b1da-43455d7ff3e6 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1084.664927] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832573, 'name': ReconfigVM_Task, 'duration_secs': 0.409578} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.665298] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfigured VM instance instance-00000069 to attach disk [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.666042] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bcfb9b8-84a3-4a69-9102-3a5043f1d542 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.673835] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1084.673835] env[62914]: value = "task-4832577" [ 1084.673835] env[62914]: _type = "Task" [ 1084.673835] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.690645] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "4648e825-359d-497f-99b4-cbc51b135860" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.690904] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "4648e825-359d-497f-99b4-cbc51b135860" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.692490] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832567, 'name': Rename_Task, 'duration_secs': 1.399585} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.697250] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1084.697736] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832577, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.698727] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca94ae96-3059-4f40-a026-3a728fad7e3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.707906] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1084.707906] env[62914]: value = "task-4832578" [ 1084.707906] env[62914]: _type = "Task" [ 1084.707906] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.720703] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.850019] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832575, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.869661] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance c56b9ad3-8c89-44ee-8ee9-8e256bcad573 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1084.906715] env[62914]: INFO nova.compute.manager [-] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Took 1.35 seconds to deallocate network for instance. [ 1085.003860] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832568, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.044129] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.076529] env[62914]: DEBUG nova.network.neutron [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updated VIF entry in instance network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1085.076971] env[62914]: DEBUG nova.network.neutron [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.195297] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832577, 'name': Rename_Task, 'duration_secs': 0.220514} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.195297] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1085.195297] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cd63cd4-6c59-446e-a37a-dc87edd346b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.195297] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1085.205944] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1085.205944] env[62914]: value = "task-4832579" [ 1085.205944] env[62914]: _type = "Task" [ 1085.205944] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.219973] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832579, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.223929] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.348817] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832575, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.373062] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4648e825-359d-497f-99b4-cbc51b135860 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1085.373062] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1085.373289] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=100GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '15', 'num_vm_active': '9', 'num_task_None': '9', 'num_os_type_None': '15', 'num_proj_b19293a423174c20963c000441db100e': '2', 'io_workload': '4', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '5', 'num_proj_3ffdaa966ecb4979845fda7778c7fb45': '1', 'num_proj_d141c01c1d5848eea6ef2b831e431ba5': '2', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '2', 'num_proj_b59bf6daf8c246f7b034dc0adcfc8cde': '2', 'num_proj_d77829ac81cd41f2a4acdd571295ca6d': '3', 'num_vm_rescued': '1', 'num_task_deleting': '1', 'num_proj_adf406f1352240aba2338e64b8f182b4': '1', 'num_vm_building': '4', 'num_proj_4860bec4a28e4289b7a508f007fff452': '1', 'num_proj_894c73ea90624428afeb1165afbbfa9c': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1085.415113] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.507317] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832568, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.871653} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.508359] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce/6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1085.508359] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.508359] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-797e44fa-f5cb-43e6-89d3-7ac6f5bd77e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.516768] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1085.516768] env[62914]: value = "task-4832580" [ 1085.516768] env[62914]: _type = "Task" [ 1085.516768] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.526878] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.542083] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 29%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.579873] env[62914]: DEBUG oslo_concurrency.lockutils [req-eec6f82f-0531-44f5-a101-ea2c7137e35b req-56625367-58cf-4dee-87e3-8a810b026f88 service nova] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.648264] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f967dc5-2e7b-4447-98ad-ba5777df9d28 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.657282] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acced862-9645-47bb-9ca2-1d72deb90b42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.691231] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6ff914-1278-494b-b6da-2e0051417e2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.702896] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac699f72-7be4-4509-8684-dc78771989aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.725236] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.731210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.731748] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832579, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.740123] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.851712] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832575, 'name': CreateVM_Task, 'duration_secs': 1.366537} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.851914] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1085.852700] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.852878] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.853242] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1085.853598] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efb6832f-e9ae-4123-9706-ca35c781d5b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.860996] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1085.860996] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522078b4-7fad-10f6-7a5c-38d511f17c41" [ 1085.860996] env[62914]: _type = "Task" [ 1085.860996] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.872860] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522078b4-7fad-10f6-7a5c-38d511f17c41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.029706] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.343711} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.029997] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.030995] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b78a3b-4606-4952-b529-33feac99954b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.043310] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.062249] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce/6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.062669] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db3066b6-ecc4-455c-a981-005621b69dbf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.085010] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1086.085010] env[62914]: value = "task-4832581" [ 1086.085010] env[62914]: _type = "Task" [ 1086.085010] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.096832] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832581, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.227090] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.230494] env[62914]: DEBUG oslo_vmware.api [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832579, 'name': PowerOnVM_Task, 'duration_secs': 0.594386} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.230802] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1086.231022] env[62914]: INFO nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Took 9.76 seconds to spawn the instance on the hypervisor. [ 1086.231336] env[62914]: DEBUG nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1086.232064] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efa628e-8c94-4d28-8513-00dd6c4a3863 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.235462] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1086.377639] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522078b4-7fad-10f6-7a5c-38d511f17c41, 'name': SearchDatastore_Task, 'duration_secs': 0.045332} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.378616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.378616] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1086.378616] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.378870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.379045] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.379374] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa0850f4-871d-438b-abf4-48359e25ccc9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.395573] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.395785] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1086.396766] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1442aa1a-7a5a-4810-bc65-cd948f8adcf2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.408750] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1086.408750] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521b2e8a-be80-7a0f-2d32-f9b4839b3c0a" [ 1086.408750] env[62914]: _type = "Task" [ 1086.408750] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.420641] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521b2e8a-be80-7a0f-2d32-f9b4839b3c0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.547767] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.597532] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832581, 'name': ReconfigVM_Task, 'duration_secs': 0.352898} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.597965] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce/6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.598704] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9204e2ab-36f0-4776-aed5-1ae041e5ca82 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.609285] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1086.609285] env[62914]: value = "task-4832583" [ 1086.609285] env[62914]: _type = "Task" [ 1086.609285] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.620423] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832583, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.731456] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.741105] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1086.741438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.412s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.741821] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.692s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.743976] env[62914]: INFO nova.compute.claims [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.761412] env[62914]: INFO nova.compute.manager [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Took 24.91 seconds to build instance. [ 1086.924736] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521b2e8a-be80-7a0f-2d32-f9b4839b3c0a, 'name': SearchDatastore_Task, 'duration_secs': 0.045929} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.926430] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aec80f3-2e10-40e2-a578-929a2a8f671a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.937441] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1086.937441] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52300680-9a60-78a7-c40c-183e942e9dc6" [ 1086.937441] env[62914]: _type = "Task" [ 1086.937441] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.950351] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52300680-9a60-78a7-c40c-183e942e9dc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.047098] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.125811] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832583, 'name': Rename_Task, 'duration_secs': 0.177512} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.126279] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1087.126644] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7c3426b-d097-47e0-a83a-b145ee29494e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.138220] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1087.138220] env[62914]: value = "task-4832585" [ 1087.138220] env[62914]: _type = "Task" [ 1087.138220] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.150804] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832585, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.235236] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.263888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52ec0873-4b56-4a4d-b9ff-ed918ebf2524 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.428s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.448214] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52300680-9a60-78a7-c40c-183e942e9dc6, 'name': SearchDatastore_Task, 'duration_secs': 0.043129} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.448528] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.448810] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1087.449272] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c6d8b5c-ce42-4102-ac04-6be9fed44b37 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.457382] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1087.457382] env[62914]: value = "task-4832586" [ 1087.457382] env[62914]: _type = "Task" [ 1087.457382] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.471369] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832586, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.546477] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832569, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.71139} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.547179] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/04d1e2dc-2bd1-433e-b7d2-80c799be344b/04d1e2dc-2bd1-433e-b7d2-80c799be344b.vmdk to [datastore1] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1087.548232] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbed7c2a-eefd-4210-95fc-4df2dcc1b455 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.579025] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.579025] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdaee7a2-98c9-41d4-a795-ed2cd7d2f370 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.603903] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1087.603903] env[62914]: value = "task-4832587" [ 1087.603903] env[62914]: _type = "Task" [ 1087.603903] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.615095] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832587, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.651029] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832585, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.734801] env[62914]: DEBUG oslo_vmware.api [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832578, 'name': PowerOnVM_Task, 'duration_secs': 2.707472} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.734910] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1087.735252] env[62914]: INFO nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Took 13.72 seconds to spawn the instance on the hypervisor. [ 1087.735480] env[62914]: DEBUG nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1087.737457] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b009c240-795e-4f4d-9cf8-53b35ba3fdcd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.786206] env[62914]: DEBUG nova.compute.manager [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1087.786352] env[62914]: DEBUG nova.compute.manager [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing instance network info cache due to event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1087.786645] env[62914]: DEBUG oslo_concurrency.lockutils [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.787037] env[62914]: DEBUG oslo_concurrency.lockutils [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.787097] env[62914]: DEBUG nova.network.neutron [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1087.968984] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.034188] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841a058a-41b8-4fcc-ab9a-01e653966bb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.042680] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1e8afe-c623-41a5-ab78-56cb0e84174e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.077967] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de31bd35-770c-4559-bd3b-d4abc7bcd2c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.087298] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47049ce8-abdb-44bd-94d4-33c5d8a917bc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.102239] env[62914]: DEBUG nova.compute.provider_tree [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.114290] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832587, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.149384] env[62914]: DEBUG oslo_vmware.api [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832585, 'name': PowerOnVM_Task, 'duration_secs': 0.567772} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.149816] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1088.150032] env[62914]: INFO nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Took 9.20 seconds to spawn the instance on the hypervisor. [ 1088.150269] env[62914]: DEBUG nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1088.151141] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2901535f-35a4-48aa-980e-5cfb1c07efdd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.258348] env[62914]: INFO nova.compute.manager [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Took 29.90 seconds to build instance. [ 1088.429249] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1088.429603] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1088.431591] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bf1d1b-ef44-4180-b60a-342ab16096ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.456128] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe3d7b3-a128-49ed-aa2e-626832d1ff0e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.473869] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832586, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.500250] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.504705] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-559823cf-f077-4cff-b6f2-e107f5e237b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.527526] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1088.527526] env[62914]: value = "task-4832588" [ 1088.527526] env[62914]: _type = "Task" [ 1088.527526] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.541435] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832588, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.606388] env[62914]: DEBUG nova.scheduler.client.report [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1088.616021] env[62914]: DEBUG nova.network.neutron [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updated VIF entry in instance network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1088.616021] env[62914]: DEBUG nova.network.neutron [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.631393] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832587, 'name': ReconfigVM_Task, 'duration_secs': 0.854929} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.633444] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 455965de-816d-4ab2-9d5e-a12b06893e6f/455965de-816d-4ab2-9d5e-a12b06893e6f.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.634891] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7462489a-ed80-4705-9c7b-2cf0c1cee140 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.651738] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1088.651738] env[62914]: value = "task-4832589" [ 1088.651738] env[62914]: _type = "Task" [ 1088.651738] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.671955] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832589, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.674070] env[62914]: INFO nova.compute.manager [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Took 23.61 seconds to build instance. [ 1088.761539] env[62914]: DEBUG oslo_concurrency.lockutils [None req-119bf824-11b3-492a-ae7e-f8f92961aaf9 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.414s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.973029] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832586, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.122827} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.973357] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1088.973639] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1088.973929] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49b22aa0-e349-45e2-92de-fcc5f90d79f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.982790] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1088.982790] env[62914]: value = "task-4832591" [ 1088.982790] env[62914]: _type = "Task" [ 1088.982790] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.993696] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.037608] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832588, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.117673] env[62914]: DEBUG oslo_concurrency.lockutils [req-62f95ea6-19ed-4cd0-84d7-887025262007 req-8782d0d8-1217-49d2-835c-52a2ee161a92 service nova] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.118601] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.119150] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1089.121914] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.280s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.122148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.127732] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.713s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.127988] env[62914]: DEBUG nova.objects.instance [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid 417d4287-0f76-4d2e-b1da-43455d7ff3e6 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.158465] env[62914]: INFO nova.scheduler.client.report [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Deleted allocations for instance 18329e67-719b-4609-83de-7db2c4096781 [ 1089.168499] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832589, 'name': Rename_Task, 'duration_secs': 0.512091} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.168990] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1089.169416] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a01f4fe-6749-47ea-9919-e5d532d0174e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.179021] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5869ba7e-3d6f-4071-9c31-3d7ff0a39943 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.121s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.179021] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1089.179021] env[62914]: value = "task-4832592" [ 1089.179021] env[62914]: _type = "Task" [ 1089.179021] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.195921] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832592, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.203828] env[62914]: DEBUG nova.compute.manager [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Received event network-changed-371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1089.203828] env[62914]: DEBUG nova.compute.manager [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Refreshing instance network info cache due to event network-changed-371d22eb-8e46-423e-b4cc-a52d3dbc0879. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1089.204154] env[62914]: DEBUG oslo_concurrency.lockutils [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] Acquiring lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.204248] env[62914]: DEBUG oslo_concurrency.lockutils [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] Acquired lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.204416] env[62914]: DEBUG nova.network.neutron [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Refreshing network info cache for port 371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1089.493966] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.223531} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.494279] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1089.496065] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536d857d-4412-4c2d-bfc2-98eca530e299 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.519111] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.519454] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f97c9e0-fb7b-410c-a576-572ffa244324 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.543183] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832588, 'name': ReconfigVM_Task, 'duration_secs': 0.888378} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.544591] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.549445] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1089.549445] env[62914]: value = "task-4832593" [ 1089.549445] env[62914]: _type = "Task" [ 1089.549445] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.549675] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-439a1736-2903-48d9-9ce5-525bebe36e8b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.569166] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1089.569166] env[62914]: value = "task-4832594" [ 1089.569166] env[62914]: _type = "Task" [ 1089.569166] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.580251] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832594, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.633176] env[62914]: DEBUG nova.compute.utils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1089.637372] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1089.637545] env[62914]: DEBUG nova.network.neutron [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1089.667701] env[62914]: DEBUG oslo_concurrency.lockutils [None req-51d02cb5-c0b4-4b72-ba9e-33ca8c7cedc2 tempest-ServerDiskConfigTestJSON-253234715 tempest-ServerDiskConfigTestJSON-253234715-project-member] Lock "18329e67-719b-4609-83de-7db2c4096781" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.197s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.688317] env[62914]: DEBUG nova.policy [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39e9b10e4bb441a7b27dcd9cdb657375', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '634fc5d7cd72455290ed9c27c8c4ee86', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1089.690200] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.690551] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.690723] env[62914]: DEBUG nova.compute.manager [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1089.694808] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923f057f-faa9-445e-bc44-14644a892975 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.698037] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832592, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.705308] env[62914]: DEBUG nova.compute.manager [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1089.706170] env[62914]: DEBUG nova.objects.instance [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'flavor' on Instance uuid 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.935158] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb0e362-36d7-410e-9604-bde905332faf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.943272] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b9645d-fd44-4ccb-879b-c90fa5212165 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.977142] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1990b9-997f-4497-908b-2ee1012f7f24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.990240] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bea3626-38b6-4200-afe3-8a1b7294ee44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.006839] env[62914]: DEBUG nova.compute.provider_tree [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.070613] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832593, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.082849] env[62914]: DEBUG oslo_vmware.api [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832594, 'name': ReconfigVM_Task, 'duration_secs': 0.338408} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.084427] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1090.095976] env[62914]: DEBUG nova.network.neutron [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Successfully created port: 926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.135328] env[62914]: DEBUG nova.network.neutron [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updated VIF entry in instance network info cache for port 371d22eb-8e46-423e-b4cc-a52d3dbc0879. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1090.135749] env[62914]: DEBUG nova.network.neutron [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updating instance_info_cache with network_info: [{"id": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "address": "fa:16:3e:66:39:af", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap371d22eb-8e", "ovs_interfaceid": "371d22eb-8e46-423e-b4cc-a52d3dbc0879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.137841] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: 
c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1090.192130] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832592, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.214101] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1090.214485] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4972cd21-cd77-4bf1-b56a-c74fa1d1e3fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.223331] env[62914]: DEBUG oslo_vmware.api [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1090.223331] env[62914]: value = "task-4832595" [ 1090.223331] env[62914]: _type = "Task" [ 1090.223331] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.235601] env[62914]: DEBUG oslo_vmware.api [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832595, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.510190] env[62914]: DEBUG nova.scheduler.client.report [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.570928] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832593, 'name': ReconfigVM_Task, 'duration_secs': 0.710671} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.571278] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.571939] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cdeb346d-d758-4fcf-803b-2338491858f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.582072] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1090.582072] env[62914]: value = "task-4832596" [ 1090.582072] env[62914]: _type = "Task" [ 1090.582072] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.593567] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832596, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.638577] env[62914]: DEBUG oslo_concurrency.lockutils [req-13a30051-8ff6-4fe7-a8a6-97b84b589d41 req-2474f875-1b18-4a82-bea6-f6538824dd23 service nova] Releasing lock "refresh_cache-67ecc3a1-03b0-4881-b5c4-9c4fa244b292" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.691310] env[62914]: DEBUG oslo_vmware.api [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832592, 'name': PowerOnVM_Task, 'duration_secs': 1.183268} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.691310] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1090.735678] env[62914]: DEBUG oslo_vmware.api [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832595, 'name': PowerOffVM_Task, 'duration_secs': 0.236994} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.735678] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1090.735678] env[62914]: DEBUG nova.compute.manager [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1090.736438] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b8786e-50c6-4806-b6ec-8ba4c14a1a0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.838271] env[62914]: DEBUG nova.compute.manager [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1090.839259] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a717df99-7084-4f74-9f32-2d9344898bb1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.015617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.018238] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.287s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.019763] env[62914]: INFO nova.compute.claims [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.051342] env[62914]: INFO nova.scheduler.client.report [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance 417d4287-0f76-4d2e-b1da-43455d7ff3e6 [ 1091.099083] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832596, 'name': Rename_Task, 'duration_secs': 0.276013} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.099083] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1091.099083] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0063e92d-9c67-4248-94c8-2895bb8886a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.105460] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1091.105460] env[62914]: value = "task-4832597" [ 1091.105460] env[62914]: _type = "Task" [ 1091.105460] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.115241] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.135114] env[62914]: DEBUG nova.objects.instance [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'flavor' on Instance uuid 3b26b5d7-524a-41af-ab75-a158568e031e {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.149333] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1091.178469] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1091.178722] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1091.178920] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.179134] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1091.179292] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1091.179447] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1091.179658] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1091.179824] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
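The nova.virt.hardware entries above and below trace the driver enumerating CPU topologies for the 1-vCPU m1.nano flavor against the 65536:65536:65536 maxima, which is why the only candidate reported is VirtCPUTopology(cores=1,sockets=1,threads=1). A minimal standalone sketch of that kind of enumeration, for illustration only: it is not the code in nova/virt/hardware.py, and the function and argument names are invented for the example.

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # requested vCPU count and which stay within the given maxima.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# For the flavor in this log: possible_topologies(1, 65536, 65536, 65536) -> [(1, 1, 1)],
# matching the single "Possible topologies" entry recorded below.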
[ 1091.179998] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1091.180189] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1091.180370] env[62914]: DEBUG nova.virt.hardware [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1091.181309] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04ac6c2-7176-4a0d-93ab-5971de1bcc78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.189775] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fa87c6-d418-4331-a0bb-518e156bf369 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.248596] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b136317-e291-4c55-88be-eef8f95f0017 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.359272] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027f2ac0-aedc-4763-9b42-d4b958031541 tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 36.160s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.566695] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d25f3625-ada3-410c-9494-4310b815ed0b tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "417d4287-0f76-4d2e-b1da-43455d7ff3e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.176s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.584578] env[62914]: DEBUG nova.compute.manager [req-0c10448f-df0b-444d-af0b-222154963259 req-b6dece22-7a44-4f64-8a51-b61a846f6db3 service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Received event network-vif-plugged-926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1091.584578] env[62914]: DEBUG oslo_concurrency.lockutils [req-0c10448f-df0b-444d-af0b-222154963259 req-b6dece22-7a44-4f64-8a51-b61a846f6db3 service nova] Acquiring lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.585072] env[62914]: DEBUG oslo_concurrency.lockutils [req-0c10448f-df0b-444d-af0b-222154963259 req-b6dece22-7a44-4f64-8a51-b61a846f6db3 service nova] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.585072] env[62914]: DEBUG oslo_concurrency.lockutils [req-0c10448f-df0b-444d-af0b-222154963259 req-b6dece22-7a44-4f64-8a51-b61a846f6db3 service nova] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.585685] env[62914]: DEBUG nova.compute.manager [req-0c10448f-df0b-444d-af0b-222154963259 req-b6dece22-7a44-4f64-8a51-b61a846f6db3 service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] No waiting events found dispatching network-vif-plugged-926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1091.585983] env[62914]: WARNING nova.compute.manager [req-0c10448f-df0b-444d-af0b-222154963259 req-b6dece22-7a44-4f64-8a51-b61a846f6db3 service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Received unexpected event network-vif-plugged-926aebc2-7c6b-4107-a2a5-e496438a84db for instance with vm_state building and task_state spawning. [ 1091.621962] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832597, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.641452] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a5d9fbed-1c78-40a7-a315-c9a6d71c7579 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.884s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.120124] env[62914]: DEBUG oslo_vmware.api [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832597, 'name': PowerOnVM_Task, 'duration_secs': 0.956299} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.122656] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1092.122857] env[62914]: INFO nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Took 10.66 seconds to spawn the instance on the hypervisor. [ 1092.123057] env[62914]: DEBUG nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1092.124398] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd599fc-41e0-46c6-ae39-f6e1b2a42632 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.268341] env[62914]: DEBUG nova.network.neutron [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Successfully updated port: 926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1092.275875] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.276319] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.276594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.276809] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.276969] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.278983] env[62914]: INFO nova.compute.manager [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Terminating instance [ 1092.280942] env[62914]: DEBUG nova.compute.manager [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1092.281155] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1092.283529] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8486c7f5-06f6-4542-8213-f55ac6c99ae5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.301797] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1092.302088] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-005d73bf-d28d-47ff-aed6-e2674d20276c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.308275] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b78bb4-1145-4497-ad38-c08feefdd785 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.318076] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2a9d64-155c-4e46-aa8d-f3be83009622 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.353867] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b7e95f-de23-4f25-be93-4866724ca255 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.362919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d384d4-2985-4812-a7cd-61813e464ace {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.380184] env[62914]: 
DEBUG nova.compute.provider_tree [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.383798] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1092.383798] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1092.383798] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleting the datastore file [datastore2] 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.384100] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e3d6ede-e84f-4eed-b447-ef757d9b573b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.392622] env[62914]: DEBUG oslo_vmware.api [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1092.392622] env[62914]: value = "task-4832599" [ 1092.392622] env[62914]: _type = "Task" [ 1092.392622] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.404640] env[62914]: DEBUG oslo_vmware.api [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832599, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.646920] env[62914]: INFO nova.compute.manager [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Took 25.61 seconds to build instance. [ 1092.757333] env[62914]: DEBUG nova.compute.manager [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Received event network-changed-926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1092.757684] env[62914]: DEBUG nova.compute.manager [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Refreshing instance network info cache due to event network-changed-926aebc2-7c6b-4107-a2a5-e496438a84db. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1092.758046] env[62914]: DEBUG oslo_concurrency.lockutils [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] Acquiring lock "refresh_cache-c56b9ad3-8c89-44ee-8ee9-8e256bcad573" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.758247] env[62914]: DEBUG oslo_concurrency.lockutils [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] Acquired lock "refresh_cache-c56b9ad3-8c89-44ee-8ee9-8e256bcad573" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.758464] env[62914]: DEBUG nova.network.neutron [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Refreshing network info cache for port 926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1092.774595] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "refresh_cache-c56b9ad3-8c89-44ee-8ee9-8e256bcad573" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.885084] env[62914]: DEBUG nova.scheduler.client.report [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1092.904982] env[62914]: DEBUG oslo_vmware.api [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203739} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.906152] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.906378] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1092.906630] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1092.906853] env[62914]: INFO nova.compute.manager [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1092.907135] env[62914]: DEBUG oslo.service.loopingcall [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.907686] env[62914]: DEBUG nova.compute.manager [-] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1092.907832] env[62914]: DEBUG nova.network.neutron [-] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1093.069352] env[62914]: INFO nova.compute.manager [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Rebuilding instance [ 1093.131418] env[62914]: DEBUG nova.compute.manager [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1093.133179] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d64cf32-65b3-4900-9bc4-208e9405ec3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.151709] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ff3a5c93-3e37-4790-ae27-568716c64199 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.126s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.254815] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.255130] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.341915] env[62914]: DEBUG nova.network.neutron [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.390298] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.390844] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1093.615909] env[62914]: DEBUG nova.network.neutron [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.648118] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1093.648118] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-931c7a6b-9f7d-4a36-8954-9e16492d5b57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.657974] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1093.657974] env[62914]: value = "task-4832600" [ 1093.657974] env[62914]: _type = "Task" [ 1093.657974] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.673164] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.758364] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1093.775491] env[62914]: DEBUG nova.network.neutron [-] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.897938] env[62914]: DEBUG nova.compute.utils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1093.899609] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1093.899823] env[62914]: DEBUG nova.network.neutron [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1094.112674] env[62914]: DEBUG nova.policy [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493014f3d66341759a8e03a7878d0af8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78ce97bf0a6a4b65b3cd1e316989a1ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1094.120637] env[62914]: DEBUG oslo_concurrency.lockutils [req-b3a8fb49-843f-43bb-8614-df87a928b442 req-978fe684-0775-4e60-a201-df4d7bf74a9b service nova] Releasing lock "refresh_cache-c56b9ad3-8c89-44ee-8ee9-8e256bcad573" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.121131] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquired lock "refresh_cache-c56b9ad3-8c89-44ee-8ee9-8e256bcad573" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.121335] env[62914]: DEBUG nova.network.neutron [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1094.168718] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.279955] env[62914]: INFO nova.compute.manager [-] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Took 1.37 seconds to deallocate network for instance. 
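Every power-on, power-off, rename, reconfigure and datastore-delete operation in the entries above follows the same oslo.vmware pattern: invoke an asynchronous vCenter *_Task method, then poll the task until vCenter reports completion (the "Waiting for the task", "progress is N%" and "completed successfully" lines). A minimal sketch of that call pattern, assuming an already-created oslo_vmware.api.VMwareAPISession and an existing VM managed-object reference obtained elsewhere; the helper name is invented for the example.

def power_off_and_wait(session, vm_ref):
    """Power off a VM and block until vCenter finishes the task.

    `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession and
    `vm_ref` a ManagedObjectReference for the VM, both obtained elsewhere.
    """
    # Submit the asynchronous vCenter task; this is what the
    # "Invoking VirtualMachine.PowerOffVM_Task" service entries record.
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # oslo.vmware polls the task at its configured interval and returns the task
    # info once vCenter marks it successful; a failed task raises an oslo.vmware
    # exception instead. The "Waiting for the task ... progress is N% ...
    # completed successfully" lines above come from this polling.
    return session.wait_for_task(task_ref)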
[ 1094.291461] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.291771] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.293813] env[62914]: INFO nova.compute.claims [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1094.379121] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.379472] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.403330] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1094.586125] env[62914]: DEBUG nova.compute.manager [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1094.586369] env[62914]: DEBUG nova.compute.manager [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing instance network info cache due to event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1094.586593] env[62914]: DEBUG oslo_concurrency.lockutils [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.586787] env[62914]: DEBUG oslo_concurrency.lockutils [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.586971] env[62914]: DEBUG nova.network.neutron [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1094.676514] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832600, 'name': PowerOffVM_Task, 'duration_secs': 0.771477} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.677799] env[62914]: DEBUG nova.network.neutron [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Successfully created port: e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.680727] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1094.682408] env[62914]: DEBUG nova.network.neutron [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1094.753402] env[62914]: INFO nova.compute.manager [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Detaching volume 73ef6951-5fab-40ea-bbd6-8971648c87c6 [ 1094.789313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.811877] env[62914]: INFO nova.virt.block_device [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Attempting to driver detach volume 73ef6951-5fab-40ea-bbd6-8971648c87c6 from mountpoint /dev/sdb [ 1094.811877] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1094.811877] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1094.811877] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1e1523-2d43-4a50-a1cd-9cbaf283d8e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.854457] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a4a5cd-8516-4dfd-b38d-dc81e8790bf5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.864524] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3957a7d2-c398-451d-a095-3c1bd86cad1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.889845] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1094.897688] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65657cce-4a7b-464c-b771-8f160f541a20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.901965] env[62914]: DEBUG nova.compute.manager [req-1f6f3954-d588-4152-b8cf-9d2d57599b3e req-9a4517d8-4b81-4879-991a-88999a4b6d37 service nova] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Received event network-vif-deleted-e159d0dc-e550-43fb-aa88-9a9b59bdc767 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1094.920890] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] The volume has not been displaced from its original location: [datastore2] volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1094.927424] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1094.928373] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c16e9fb0-6ac5-4ba8-a5d0-2e34bcbe88f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.951619] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1094.951619] env[62914]: value = "task-4832601" [ 1094.951619] env[62914]: _type = "Task" [ 1094.951619] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.960841] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832601, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.976312] env[62914]: DEBUG nova.network.neutron [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Updating instance_info_cache with network_info: [{"id": "926aebc2-7c6b-4107-a2a5-e496438a84db", "address": "fa:16:3e:35:0d:45", "network": {"id": "d7eb395c-2e5c-434c-9d62-f795fcaa3285", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-956518944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "634fc5d7cd72455290ed9c27c8c4ee86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926aebc2-7c", "ovs_interfaceid": "926aebc2-7c6b-4107-a2a5-e496438a84db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.425649] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.433270] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1095.464593] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832601, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.479403] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1095.479829] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1095.479829] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.480016] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1095.480760] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.480867] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1095.481262] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1095.481468] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1095.481758] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1095.482044] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1095.482294] env[62914]: DEBUG nova.virt.hardware [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1095.483542] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Releasing lock "refresh_cache-c56b9ad3-8c89-44ee-8ee9-8e256bcad573" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.483542] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Instance network_info: |[{"id": "926aebc2-7c6b-4107-a2a5-e496438a84db", "address": "fa:16:3e:35:0d:45", "network": {"id": "d7eb395c-2e5c-434c-9d62-f795fcaa3285", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-956518944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "634fc5d7cd72455290ed9c27c8c4ee86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926aebc2-7c", "ovs_interfaceid": "926aebc2-7c6b-4107-a2a5-e496438a84db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1095.484679] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e702283b-71d4-4ac7-ae75-2bc9e385177a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.490142] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 
tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:0d:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf63c3c8-d774-4b81-9b12-848612a96076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '926aebc2-7c6b-4107-a2a5-e496438a84db', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.498208] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Creating folder: Project (634fc5d7cd72455290ed9c27c8c4ee86). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1095.502166] env[62914]: DEBUG nova.network.neutron [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updated VIF entry in instance network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1095.502631] env[62914]: DEBUG nova.network.neutron [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.503774] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05d2db1d-baf7-494b-b995-a15a937ff635 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.514500] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367b4bd0-cc7a-4767-88bd-17afda574120 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.523827] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f2901626-1804-4ee4-b159-d2eb2b704379 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.530621] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Suspending the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1095.532095] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3602195a-7641-471f-91b3-e8f0d4c1c89f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.546073] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Created folder: Project (634fc5d7cd72455290ed9c27c8c4ee86) in parent group-v941773. [ 1095.546538] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Creating folder: Instances. Parent ref: group-v942064. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1095.550858] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fa47062-5f66-4862-8e99-1df05445a9b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.558679] env[62914]: DEBUG oslo_vmware.api [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1095.558679] env[62914]: value = "task-4832603" [ 1095.558679] env[62914]: _type = "Task" [ 1095.558679] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.565988] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Created folder: Instances in parent group-v942064. [ 1095.566347] env[62914]: DEBUG oslo.service.loopingcall [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1095.570889] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1095.570889] env[62914]: DEBUG oslo_vmware.api [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832603, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.572996] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7af7c5a-50bc-4cae-9d7d-e2ae6c587304 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.596783] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.596783] env[62914]: value = "task-4832605" [ 1095.596783] env[62914]: _type = "Task" [ 1095.596783] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.607246] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832605, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.727123] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132c3f54-f8f2-4c12-a700-f7312f5fb4ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.737962] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166c384b-6a37-4395-8412-f00460ac49c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.780115] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03de95d-f233-46fa-a160-4ea8e967919b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.801616] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c385b0-247a-4d00-a0b9-b226668b66fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.825329] env[62914]: DEBUG nova.compute.provider_tree [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.967333] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832601, 'name': ReconfigVM_Task, 'duration_secs': 0.622783} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.968106] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1095.973426] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61f56716-f057-49b0-934a-9132f524ec8e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.993143] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1095.993143] env[62914]: value = "task-4832606" [ 1095.993143] env[62914]: _type = "Task" [ 1095.993143] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.003463] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832606, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.011491] env[62914]: DEBUG oslo_concurrency.lockutils [req-36fcd339-e36c-4234-b618-a44c98af1840 req-5b7e1aec-7057-4e27-99f1-6b870f21705d service nova] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.073617] env[62914]: DEBUG oslo_vmware.api [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832603, 'name': SuspendVM_Task} progress is 54%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.109303] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832605, 'name': CreateVM_Task, 'duration_secs': 0.484397} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.109487] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1096.110746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.110746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.110850] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1096.111298] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c88c84-b7c6-40f3-aa4f-cf8c3f6eb264 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.116504] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1096.116504] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e72c3-89a8-2f08-f05d-e4cff61b4abf" [ 1096.116504] env[62914]: _type = "Task" [ 1096.116504] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.125515] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e72c3-89a8-2f08-f05d-e4cff61b4abf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.329297] env[62914]: DEBUG nova.scheduler.client.report [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1096.506105] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832606, 'name': ReconfigVM_Task, 'duration_secs': 0.301868} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.506514] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1096.573631] env[62914]: DEBUG oslo_vmware.api [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832603, 'name': SuspendVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.629119] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]520e72c3-89a8-2f08-f05d-e4cff61b4abf, 'name': SearchDatastore_Task, 'duration_secs': 0.031751} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.629498] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.629794] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.630112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.630305] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.630509] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.630799] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c61fc4e3-7494-49ca-94e6-2d41a71403d7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.641548] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.641763] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1096.642555] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1b55a50-7e53-4f58-8da6-a1a62c6eb31a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.648611] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1096.648611] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268b3eb-658e-e02f-a5f8-316b47e47307" [ 1096.648611] env[62914]: _type = "Task" [ 1096.648611] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.659464] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268b3eb-658e-e02f-a5f8-316b47e47307, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.759844] env[62914]: DEBUG nova.network.neutron [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Successfully updated port: e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.829454] env[62914]: DEBUG nova.compute.manager [req-4ba18397-cc75-47a8-92b4-6fa09e894c7d req-c45fd2eb-51db-4e14-b093-6157d2e4816b service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Received event network-vif-plugged-e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1096.829454] env[62914]: DEBUG oslo_concurrency.lockutils [req-4ba18397-cc75-47a8-92b4-6fa09e894c7d req-c45fd2eb-51db-4e14-b093-6157d2e4816b service nova] Acquiring lock "4648e825-359d-497f-99b4-cbc51b135860-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.829454] env[62914]: DEBUG oslo_concurrency.lockutils [req-4ba18397-cc75-47a8-92b4-6fa09e894c7d req-c45fd2eb-51db-4e14-b093-6157d2e4816b service nova] Lock "4648e825-359d-497f-99b4-cbc51b135860-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.829454] env[62914]: DEBUG oslo_concurrency.lockutils [req-4ba18397-cc75-47a8-92b4-6fa09e894c7d req-c45fd2eb-51db-4e14-b093-6157d2e4816b service nova] Lock "4648e825-359d-497f-99b4-cbc51b135860-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.829838] env[62914]: DEBUG nova.compute.manager [req-4ba18397-cc75-47a8-92b4-6fa09e894c7d req-c45fd2eb-51db-4e14-b093-6157d2e4816b service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] No waiting events 
found dispatching network-vif-plugged-e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1096.829838] env[62914]: WARNING nova.compute.manager [req-4ba18397-cc75-47a8-92b4-6fa09e894c7d req-c45fd2eb-51db-4e14-b093-6157d2e4816b service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Received unexpected event network-vif-plugged-e597807d-27f1-4d23-b472-414faeba65a8 for instance with vm_state building and task_state spawning. [ 1096.835501] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.835873] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1096.839007] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.050s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.839261] env[62914]: DEBUG nova.objects.instance [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'resources' on Instance uuid 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.072702] env[62914]: DEBUG oslo_vmware.api [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832603, 'name': SuspendVM_Task, 'duration_secs': 1.037761} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.075346] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Suspended the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1097.075557] env[62914]: DEBUG nova.compute.manager [None req-c03aae7b-b334-4ac6-835a-4745fb76964c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1097.076420] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89504c0d-e77a-43de-818b-28178e80ed78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.163445] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5268b3eb-658e-e02f-a5f8-316b47e47307, 'name': SearchDatastore_Task, 'duration_secs': 0.02212} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.163445] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20feb943-e124-42a6-ba96-d715ab255682 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.170443] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1097.170443] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52733fab-c4c3-b721-d3a0-9d7af185bc1f" [ 1097.170443] env[62914]: _type = "Task" [ 1097.170443] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.179640] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52733fab-c4c3-b721-d3a0-9d7af185bc1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.263200] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "refresh_cache-4648e825-359d-497f-99b4-cbc51b135860" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.263365] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "refresh_cache-4648e825-359d-497f-99b4-cbc51b135860" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.263632] env[62914]: DEBUG nova.network.neutron [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1097.343435] env[62914]: DEBUG nova.compute.utils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1097.352131] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1097.352348] env[62914]: DEBUG nova.network.neutron [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1097.404769] env[62914]: DEBUG nova.policy [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9f49642dfac458ab76523d4fdc078a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceffd38633104c58bbdc3176b7489c92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1097.527796] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "422f30c1-fc6a-4c82-9003-806a5959ee8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.528044] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.582559] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1097.584318] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f56ea13b-7cf8-47c4-9ecd-1e032216537f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.592391] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1097.592391] env[62914]: value = "task-4832607" [ 1097.592391] env[62914]: _type = "Task" [ 1097.592391] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.607503] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1097.607808] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1097.607952] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1097.608787] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d157a6-f393-443b-b891-0e87d1e6d40a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.634471] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c44de39-cad4-47bd-8b93-ac63276bd5c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.642241] env[62914]: WARNING nova.virt.vmwareapi.driver [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1097.642983] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1097.645378] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a25a8-0d79-406b-bd3c-34d1507e063a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.662528] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Unregistering the VM {{(pid=62914) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1097.662528] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbf05807-47ed-4878-a325-ede7ff0752f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.686498] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52733fab-c4c3-b721-d3a0-9d7af185bc1f, 'name': SearchDatastore_Task, 'duration_secs': 0.024803} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.686975] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.687270] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] c56b9ad3-8c89-44ee-8ee9-8e256bcad573/c56b9ad3-8c89-44ee-8ee9-8e256bcad573.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1097.687554] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1638b9be-5064-4bc8-9f06-5df4789f8906 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.696174] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1097.696174] env[62914]: value = "task-4832609" [ 1097.696174] env[62914]: _type = "Task" [ 1097.696174] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.698164] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1913e800-6be4-4a74-98b6-ff93c245cea5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.714326] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ad1b4a-7f40-4a6f-9132-9e0d7d0065b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.717742] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832609, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.750835] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8723538c-360d-4260-acc4-1492f2bfd39e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.753469] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1097.753893] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1097.753893] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore1] 3b26b5d7-524a-41af-ab75-a158568e031e {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.756124] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d10883e9-5a14-46f7-9aa4-94a0639f7741 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.762684] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee54f6a-b26b-430b-8e4d-14dee1c6cfa9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.766803] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1097.766803] env[62914]: value = "task-4832610" [ 1097.766803] env[62914]: _type = "Task" [ 1097.766803] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.782577] env[62914]: DEBUG nova.compute.provider_tree [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.789596] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832610, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.822468] env[62914]: DEBUG nova.network.neutron [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1097.855488] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1097.963843] env[62914]: DEBUG nova.network.neutron [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Successfully created port: 3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1098.034612] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1098.049102] env[62914]: DEBUG nova.network.neutron [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Updating instance_info_cache with network_info: [{"id": "e597807d-27f1-4d23-b472-414faeba65a8", "address": "fa:16:3e:48:99:6a", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape597807d-27", "ovs_interfaceid": "e597807d-27f1-4d23-b472-414faeba65a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.155843] env[62914]: INFO nova.compute.manager [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Resuming [ 1098.156544] env[62914]: DEBUG 
nova.objects.instance [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'flavor' on Instance uuid 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.209343] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832609, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.280634] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3358} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.281109] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.281231] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1098.281616] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1098.285385] env[62914]: DEBUG nova.scheduler.client.report [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1098.361089] env[62914]: INFO nova.virt.block_device [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Booting with volume d801e165-dc65-4457-9762-f209bc342e87 at /dev/sda [ 1098.400922] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cae050b4-8074-4dfe-a29f-9ed22f856844 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.410743] env[62914]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd04ee1c-d3eb-407d-b129-5913317d71c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.445959] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb82b1cc-b00d-4b75-9c15-d020f9594ce1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.454914] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c768afc-21ac-42af-90e5-d5fab043712b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.492671] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc4bf4c-5ea4-459e-9a5c-4206645c19a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.500045] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2c34a5-9e70-4e8c-9fbe-013f73eaf4b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.513552] env[62914]: DEBUG nova.virt.block_device [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updating existing volume attachment record: 4a56ab56-b7ba-4cac-bcab-89e2be33007f {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1098.552504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "refresh_cache-4648e825-359d-497f-99b4-cbc51b135860" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.552860] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Instance network_info: |[{"id": "e597807d-27f1-4d23-b472-414faeba65a8", "address": "fa:16:3e:48:99:6a", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape597807d-27", "ovs_interfaceid": "e597807d-27f1-4d23-b472-414faeba65a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1098.553356] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:99:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78b49840-c3fc-455c-8491-a253ccd92bb5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e597807d-27f1-4d23-b472-414faeba65a8', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.561269] env[62914]: DEBUG oslo.service.loopingcall [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.562489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.562816] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1098.563084] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16a8ddc9-5cbf-4965-a5be-5a805e75ee2e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.584119] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.584119] env[62914]: value = "task-4832611" [ 1098.584119] env[62914]: _type = "Task" [ 1098.584119] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.592331] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832611, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.707821] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666681} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.708152] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] c56b9ad3-8c89-44ee-8ee9-8e256bcad573/c56b9ad3-8c89-44ee-8ee9-8e256bcad573.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1098.708343] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1098.708614] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0b74088-64cc-4122-b885-d02b0acc5419 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.716239] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1098.716239] env[62914]: value = "task-4832612" [ 1098.716239] env[62914]: _type = "Task" [ 1098.716239] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.724723] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832612, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.792175] env[62914]: INFO nova.virt.block_device [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Booting with volume 73ef6951-5fab-40ea-bbd6-8971648c87c6 at /dev/sdb [ 1098.794615] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.797117] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.371s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.798800] env[62914]: INFO nova.compute.claims [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1098.823870] env[62914]: INFO nova.scheduler.client.report [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocations for instance 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce [ 1098.840972] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f28b1ec-3836-4ff3-9ffe-3032b4369e74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.853835] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c0471f-e5ad-4688-92c1-2c18f412bff5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.869096] env[62914]: DEBUG nova.compute.manager [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Received event network-changed-e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1098.869602] env[62914]: DEBUG nova.compute.manager [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Refreshing instance network info cache due to event network-changed-e597807d-27f1-4d23-b472-414faeba65a8. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1098.869874] env[62914]: DEBUG oslo_concurrency.lockutils [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] Acquiring lock "refresh_cache-4648e825-359d-497f-99b4-cbc51b135860" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.870076] env[62914]: DEBUG oslo_concurrency.lockutils [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] Acquired lock "refresh_cache-4648e825-359d-497f-99b4-cbc51b135860" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.870287] env[62914]: DEBUG nova.network.neutron [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Refreshing network info cache for port e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1098.904336] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f35435a0-b9cd-4505-a051-7a8d612041dd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.914277] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2db28b4-fbd3-4d74-9605-5b6d0c698027 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.952597] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31eefaff-933b-453e-9c0e-d53db1efc96c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.960584] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c513ca09-8b20-4c22-9942-ca5901b658a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.975538] env[62914]: DEBUG nova.virt.block_device [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updating existing volume attachment record: 498e96f4-e3c6-47c3-94ae-1deb50650ed4 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1099.096045] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832611, 'name': CreateVM_Task, 'duration_secs': 0.364116} completed successfully. 
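The "Acquiring lock"/"Acquired lock"/"Releasing lock" entries for `refresh_cache-<uuid>` above come from oslo.concurrency's named locks, which serialize refreshes of an instance's cached network info. A hypothetical sketch of the same pattern follows; the instance UUID is only a placeholder copied from the log.

```python
from oslo_concurrency import lockutils

# Hypothetical sketch of the named-lock pattern in the entries above; the
# UUID below is a placeholder copied from this log.
instance_uuid = "4648e825-359d-497f-99b4-cbc51b135860"

# lockutils.lock() logs acquisition on entry and release on exit, similar to
# the lockutils.py:310/313/331 lines above.
with lockutils.lock("refresh_cache-%s" % instance_uuid):
    # Refresh or read the instance's cached network_info here.
    pass
```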
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.096212] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1099.097433] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.097433] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.097608] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1099.097793] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e726490-74fb-4f86-8187-7ed8ccd21749 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.103813] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1099.103813] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ce2c0-4df6-7fe2-4bc5-86e164e48b4d" [ 1099.103813] env[62914]: _type = "Task" [ 1099.103813] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.113173] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ce2c0-4df6-7fe2-4bc5-86e164e48b4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.164677] env[62914]: DEBUG oslo_concurrency.lockutils [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.164898] env[62914]: DEBUG oslo_concurrency.lockutils [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquired lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.165066] env[62914]: DEBUG nova.network.neutron [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1099.226129] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067303} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.226426] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.227260] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd77115-0025-40b0-86ea-11e3ef1fde46 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.253279] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] c56b9ad3-8c89-44ee-8ee9-8e256bcad573/c56b9ad3-8c89-44ee-8ee9-8e256bcad573.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.253279] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-398652d1-38b8-4d6d-91f5-d911c8f69ff2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.274339] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1099.274339] env[62914]: value = "task-4832613" [ 1099.274339] env[62914]: _type = "Task" [ 1099.274339] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.284502] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832613, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.334184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a9e771cd-8c7b-418d-a2bd-89c2bfec80e9 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.058s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.607811] env[62914]: DEBUG nova.network.neutron [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Updated VIF entry in instance network info cache for port e597807d-27f1-4d23-b472-414faeba65a8. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1099.607811] env[62914]: DEBUG nova.network.neutron [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Updating instance_info_cache with network_info: [{"id": "e597807d-27f1-4d23-b472-414faeba65a8", "address": "fa:16:3e:48:99:6a", "network": {"id": "852b2ca1-a851-4ad0-9827-22b7ed3e95f1", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-622636532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78ce97bf0a6a4b65b3cd1e316989a1ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78b49840-c3fc-455c-8491-a253ccd92bb5", "external-id": "nsx-vlan-transportzone-779", "segmentation_id": 779, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape597807d-27", "ovs_interfaceid": "e597807d-27f1-4d23-b472-414faeba65a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.619365] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ce2c0-4df6-7fe2-4bc5-86e164e48b4d, 'name': SearchDatastore_Task, 'duration_secs': 0.033539} completed successfully. 
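The instance_info_cache updates above serialize network_info as JSON. As an illustration only, a heavily abbreviated placeholder of such a blob can be parsed like this to pull out the port ID, VIF type, fixed IPs, and MTU.

```python
import json

# Illustration only: abbreviated placeholder modeled on the
# update_instance_cache_with_nw_info entries above.
network_info = json.loads("""
[{"id": "e597807d-27f1-4d23-b472-414faeba65a8",
  "type": "ovs",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.12"}]}],
              "meta": {"mtu": 8950}}}]
""")

for vif in network_info:
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["type"], ips, vif["network"]["meta"]["mtu"])
```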
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.620614] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.621043] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.621525] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.621795] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.622104] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.623012] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4f5559a-dfc9-49b3-b552-29704fa53543 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.642111] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.642111] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1099.642111] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3696ff1a-81cc-4f4b-924f-fb3f5c2338ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.647630] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1099.647630] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b0c931-eb0c-6dcb-9150-8025c9e37f6f" [ 1099.647630] env[62914]: _type = "Task" [ 1099.647630] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.656707] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b0c931-eb0c-6dcb-9150-8025c9e37f6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.791234] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.890295] env[62914]: DEBUG nova.network.neutron [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Successfully updated port: 3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1100.055506] env[62914]: INFO nova.compute.manager [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Rebuilding instance [ 1100.110161] env[62914]: DEBUG nova.compute.manager [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1100.111083] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699786ce-060c-42df-a885-ae635fc6ce94 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.116166] env[62914]: DEBUG oslo_concurrency.lockutils [req-42be2e52-f333-446b-9b32-5fb7fc395b7b req-befb0c06-b754-4b32-91aa-a32463c8d453 service nova] Releasing lock "refresh_cache-4648e825-359d-497f-99b4-cbc51b135860" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.131566] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7acd72-6c46-4fbd-8c86-7c79d6e288c6 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.140357] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c10cd4-7406-4671-8282-0683006fb379 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.175441] env[62914]: DEBUG nova.network.neutron [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [{"id": "00706251-f634-4dcb-9705-105152de241f", "address": "fa:16:3e:66:a2:0d", "network": {"id": "0d1783c5-4099-4140-bb0d-8c3f740d2f5f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2117237055-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ffdaa966ecb4979845fda7778c7fb45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "456bd8a2-0fb6-4b17-9d25-08e7995c5184", "external-id": "nsx-vlan-transportzone-65", "segmentation_id": 65, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00706251-f6", "ovs_interfaceid": "00706251-f634-4dcb-9705-105152de241f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.183028] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cc2b5c-c437-48bd-ba74-5edf2e9fd723 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.191327] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b0c931-eb0c-6dcb-9150-8025c9e37f6f, 'name': SearchDatastore_Task, 'duration_secs': 0.036003} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.192535] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac3fe5f-636c-4aa7-a330-7a95183750a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.197368] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04c2fa3d-4f73-4141-b619-e6bdcb1e1f15 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.211079] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1100.211079] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5213ed16-1ab5-b193-2697-7365932c30e8" [ 1100.211079] env[62914]: _type = "Task" [ 1100.211079] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.212035] env[62914]: DEBUG nova.compute.provider_tree [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.222587] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5213ed16-1ab5-b193-2697-7365932c30e8, 'name': SearchDatastore_Task, 'duration_secs': 0.010097} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.223451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.223725] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4648e825-359d-497f-99b4-cbc51b135860/4648e825-359d-497f-99b4-cbc51b135860.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1100.223985] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc349545-8d21-4cbc-bd81-60a8ae3699e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.231128] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1100.231128] env[62914]: value = "task-4832614" [ 1100.231128] env[62914]: _type = "Task" [ 1100.231128] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.239676] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832614, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.285924] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832613, 'name': ReconfigVM_Task, 'duration_secs': 0.738024} completed successfully. 
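The disk copy above follows the datastore layout visible throughout this log: images are cached once per image UUID under `devstack-image-cache_base`, then copied into a per-instance folder before the root disk is extended. The helpers below are illustrative only (hypothetical names, not Nova's ds_util API) and simply reproduce the two paths used by that CopyVirtualDisk_Task.

```python
# Illustrative helpers only; they mirror the source and destination paths of
# the CopyVirtualDisk_Task above.
def cached_image_path(datastore, image_id):
    return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)


def instance_root_disk_path(datastore, instance_uuid):
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)


print(cached_image_path("datastore2", "75c43660-b52b-450e-ba36-0f721e14bc6c"))
print(instance_root_disk_path("datastore2", "4648e825-359d-497f-99b4-cbc51b135860"))
```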
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.286337] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Reconfigured VM instance instance-0000006c to attach disk [datastore2] c56b9ad3-8c89-44ee-8ee9-8e256bcad573/c56b9ad3-8c89-44ee-8ee9-8e256bcad573.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1100.287076] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20c967e2-e462-4be6-b448-f047ca123dba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.294685] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1100.294685] env[62914]: value = "task-4832615" [ 1100.294685] env[62914]: _type = "Task" [ 1100.294685] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.304739] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832615, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.397515] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.397669] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquired lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.397830] env[62914]: DEBUG nova.network.neutron [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1100.544376] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "58be0d35-9392-47ad-b87c-a1b66cdc3623" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.545493] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock 
"58be0d35-9392-47ad-b87c-a1b66cdc3623" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.609923] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1100.610637] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1100.611085] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1100.611431] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1100.611711] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1100.611943] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1100.612182] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1100.612433] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1100.612713] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1100.612958] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1100.613159] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1100.613343] env[62914]: DEBUG nova.virt.hardware [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.614740] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2678ad2f-62ca-4de9-98b7-e556dcf324fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.626726] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe13888-ac52-4d7d-908e-7bca7f4c8980 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.632548] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1100.633361] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-938c2b78-b3a8-4df7-873c-b4bff8d08a48 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.641247] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1100.641247] env[62914]: value = "task-4832616" [ 1100.641247] env[62914]: _type = "Task" [ 1100.641247] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.664140] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832616, 'name': PowerOffVM_Task} progress is 0%. 
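The nova.virt.hardware entries above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies") enumerate every sockets:cores:threads split of the flavor's vCPU count within the logged limits. The function below is a simplified re-creation of that enumeration, not Nova's actual implementation; for vcpus=1 it yields exactly one topology, 1:1:1, matching the log.

```python
# Simplified, hypothetical re-creation of the topology enumeration described
# by the hardware.py entries above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"
```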
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.684233] env[62914]: DEBUG oslo_concurrency.lockutils [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Releasing lock "refresh_cache-455965de-816d-4ab2-9d5e-a12b06893e6f" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.685457] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df3d53e-6729-4a0e-bda7-b308bf61ae72 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.692916] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Resuming the VM {{(pid=62914) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1100.693266] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-029b9f57-fc65-48ba-ba99-7279e158960c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.701112] env[62914]: DEBUG oslo_vmware.api [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1100.701112] env[62914]: value = "task-4832617" [ 1100.701112] env[62914]: _type = "Task" [ 1100.701112] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.710076] env[62914]: DEBUG oslo_vmware.api [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832617, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.717362] env[62914]: DEBUG nova.scheduler.client.report [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1100.742079] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832614, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475122} completed successfully. 
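The placement inventory reported above for provider f2f7a014-852b-4b37-9610-c5761f4b0175 can be sanity-checked with the usual capacity formula, (total - reserved) * allocation_ratio. The snippet below is a back-of-the-envelope illustration using the values from the log, not placement's code.

```python
# Values copied from the "Inventory has not changed for provider ..." entry above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
```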
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.742452] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4648e825-359d-497f-99b4-cbc51b135860/4648e825-359d-497f-99b4-cbc51b135860.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1100.742724] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1100.742999] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65847b6e-6acc-46b2-a296-d99770902bdf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.751018] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1100.751018] env[62914]: value = "task-4832618" [ 1100.751018] env[62914]: _type = "Task" [ 1100.751018] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.760227] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832618, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.806468] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832615, 'name': Rename_Task, 'duration_secs': 0.200609} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.806681] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1100.806970] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-560ab54b-b4ea-4d52-b77d-2c6b8d64528c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.815232] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1100.815232] env[62914]: value = "task-4832619" [ 1100.815232] env[62914]: _type = "Task" [ 1100.815232] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.824486] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832619, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.895877] env[62914]: DEBUG nova.compute.manager [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Received event network-vif-plugged-3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1100.896140] env[62914]: DEBUG oslo_concurrency.lockutils [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] Acquiring lock "19f21caa-7d96-4526-bb12-768c4fe4d23e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.896327] env[62914]: DEBUG oslo_concurrency.lockutils [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.896497] env[62914]: DEBUG oslo_concurrency.lockutils [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.896677] env[62914]: DEBUG nova.compute.manager [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] No waiting events found dispatching network-vif-plugged-3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1100.896850] env[62914]: 
WARNING nova.compute.manager [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Received unexpected event network-vif-plugged-3724af6c-0dc9-4056-9f6c-d5d8f85f195e for instance with vm_state building and task_state spawning. [ 1100.897033] env[62914]: DEBUG nova.compute.manager [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Received event network-changed-3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1100.897188] env[62914]: DEBUG nova.compute.manager [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Refreshing instance network info cache due to event network-changed-3724af6c-0dc9-4056-9f6c-d5d8f85f195e. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1100.897361] env[62914]: DEBUG oslo_concurrency.lockutils [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] Acquiring lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.932269] env[62914]: DEBUG nova.network.neutron [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1101.047171] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1101.086992] env[62914]: DEBUG nova.network.neutron [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updating instance_info_cache with network_info: [{"id": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "address": "fa:16:3e:74:74:3d", "network": {"id": "f0aa6b1f-217f-4abb-be3b-40cf13b8ea6d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-55159045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceffd38633104c58bbdc3176b7489c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3724af6c-0d", "ovs_interfaceid": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.127170] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1101.127450] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1101.127614] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.127803] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1101.127958] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.128131] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1101.128353] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1101.128516] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1101.128690] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1101.128854] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1101.129044] env[62914]: DEBUG nova.virt.hardware [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1101.129910] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de395215-998a-4d53-bc60-a5c057da89a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.138161] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95193852-eb8f-475f-986f-cc5bb28119e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.153444] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:5c:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '689aba7f-31af-4116-8b4e-bcec10c9c5ba', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.160939] env[62914]: DEBUG oslo.service.loopingcall [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.164647] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1101.164926] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94d49733-4c61-467b-9770-5d32505f938a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.185562] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832616, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.186933] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.186933] env[62914]: value = "task-4832620" [ 1101.186933] env[62914]: _type = "Task" [ 1101.186933] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.196680] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832620, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.212914] env[62914]: DEBUG oslo_vmware.api [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832617, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.224604] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.224811] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1101.227545] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.665s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.228990] env[62914]: INFO nova.compute.claims [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.262430] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832618, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.326388] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832619, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.572191] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.590274] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Releasing lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.590484] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance network_info: |[{"id": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "address": "fa:16:3e:74:74:3d", "network": {"id": "f0aa6b1f-217f-4abb-be3b-40cf13b8ea6d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-55159045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceffd38633104c58bbdc3176b7489c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": 
"nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3724af6c-0d", "ovs_interfaceid": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1101.590779] env[62914]: DEBUG oslo_concurrency.lockutils [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] Acquired lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.590971] env[62914]: DEBUG nova.network.neutron [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Refreshing network info cache for port 3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1101.592301] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:74:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604c9724-b4ef-4393-a76e-eb4a2b510796', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3724af6c-0dc9-4056-9f6c-d5d8f85f195e', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.599828] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Creating folder: Project (ceffd38633104c58bbdc3176b7489c92). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1101.603458] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7ede722-0cbd-4987-b6aa-549c2060569b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.618522] env[62914]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1101.619200] env[62914]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62914) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1101.619200] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Folder already exists: Project (ceffd38633104c58bbdc3176b7489c92). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1101.619386] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Creating folder: Instances. Parent ref: group-v942052. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1101.619632] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-265023cc-7b52-4c6b-bd2a-978b82493efd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.633478] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Created folder: Instances in parent group-v942052. [ 1101.633991] env[62914]: DEBUG oslo.service.loopingcall [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.634247] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1101.634527] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e602eb6-53eb-4a94-be6e-6fe54e527dba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.657749] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.657749] env[62914]: value = "task-4832623" [ 1101.657749] env[62914]: _type = "Task" [ 1101.657749] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.664213] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832616, 'name': PowerOffVM_Task, 'duration_secs': 0.598227} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.664930] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1101.665207] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1101.666010] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb34f07-c216-474d-8a3f-332933ce9048 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.671745] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832623, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.676689] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1101.676955] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c7a6d52-b60a-47b8-840b-b78066bc5ad3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.701980] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832620, 'name': CreateVM_Task, 'duration_secs': 0.399574} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.702325] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1101.706184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.706389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.706665] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1101.706965] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2440181a-2710-476c-9327-92fbdc89dec1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.718635] env[62914]: DEBUG oslo_vmware.api [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832617, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.720149] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1101.720149] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52322cc3-d0ed-c549-0403-5fd198a3f477" [ 1101.720149] env[62914]: _type = "Task" [ 1101.720149] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.732201] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52322cc3-d0ed-c549-0403-5fd198a3f477, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.733705] env[62914]: DEBUG nova.compute.utils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1101.737886] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1101.737886] env[62914]: DEBUG nova.network.neutron [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1101.762898] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832618, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.772025] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1101.772319] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1101.772550] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleting the datastore file [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.772888] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27b3b911-dbac-4357-b6a5-e883491c5069 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.780114] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1101.780114] env[62914]: value = "task-4832625" [ 1101.780114] env[62914]: _type = "Task" [ 1101.780114] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.788159] env[62914]: DEBUG nova.policy [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1101.796761] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.827398] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832619, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.894030] env[62914]: DEBUG nova.network.neutron [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updated VIF entry in instance network info cache for port 3724af6c-0dc9-4056-9f6c-d5d8f85f195e. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1101.894734] env[62914]: DEBUG nova.network.neutron [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updating instance_info_cache with network_info: [{"id": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "address": "fa:16:3e:74:74:3d", "network": {"id": "f0aa6b1f-217f-4abb-be3b-40cf13b8ea6d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-55159045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceffd38633104c58bbdc3176b7489c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3724af6c-0d", "ovs_interfaceid": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.172544] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832623, 'name': CreateVM_Task, 'duration_secs': 0.448927} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.172544] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1102.172919] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': '4a56ab56-b7ba-4cac-bcab-89e2be33007f', 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942060', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'name': 'volume-d801e165-dc65-4457-9762-f209bc342e87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19f21caa-7d96-4526-bb12-768c4fe4d23e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'serial': 'd801e165-dc65-4457-9762-f209bc342e87'}, 'delete_on_termination': True, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62914) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1102.173172] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Root volume attach. Driver type: vmdk {{(pid=62914) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1102.174013] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd6552f-7e19-4634-b973-ca7f5287b308 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.184590] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe8bfe5-1917-4047-9ca5-0601011e0b80 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.193622] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89782a72-c9f7-4bf7-be94-924f25cbc302 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.202410] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4b923ca4-8e64-44bb-858b-f12b974b768a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.217838] env[62914]: DEBUG oslo_vmware.api [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832617, 'name': PowerOnVM_Task, 'duration_secs': 1.422085} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.219547] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Resumed the VM {{(pid=62914) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1102.219902] env[62914]: DEBUG nova.compute.manager [None req-515cf1c3-bcec-41ed-927e-fc89a7e9650c tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1102.220284] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1102.220284] env[62914]: value = "task-4832626" [ 1102.220284] env[62914]: _type = "Task" [ 1102.220284] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.221165] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fea4eef-8fdc-47fd-ade0-6fb7b923de44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.257513] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1102.262473] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832626, 'name': RelocateVM_Task} progress is 7%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.263119] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52322cc3-d0ed-c549-0403-5fd198a3f477, 'name': SearchDatastore_Task, 'duration_secs': 0.026218} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.267429] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.268582] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.269049] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.269875] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.270260] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.280926] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac80c55f-8349-4647-8c20-d956585f4426 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.291564] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.06061} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.292589] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1102.293913] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8c4c6c-55bf-483c-a424-7b8e2f0d3926 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.303756] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318191} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.306059] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1102.307089] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1102.307089] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1102.311853] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.311853] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1102.326661] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d05003-0657-479d-a095-df985fe4808c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.342259] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 4648e825-359d-497f-99b4-cbc51b135860/4648e825-359d-497f-99b4-cbc51b135860.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.352400] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-721e5762-968d-47c9-a28e-8e4c09ffcee3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.369677] env[62914]: DEBUG nova.network.neutron [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Successfully created port: 7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.376122] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1102.376122] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52780351-96d9-db8f-deef-72997b7dd7aa" [ 1102.376122] env[62914]: _type = "Task" [ 1102.376122] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.379802] env[62914]: DEBUG oslo_vmware.api [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832619, 'name': PowerOnVM_Task, 'duration_secs': 1.523231} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.387017] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1102.387298] env[62914]: INFO nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Took 11.24 seconds to spawn the instance on the hypervisor. 
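The recurring "Invoking ... / Waiting for the task ... / progress is N% / completed successfully" entries in this section come from oslo.vmware's request and task-polling helpers. Below is a minimal sketch of how a caller drives that pattern; the vCenter host, credentials, and the way the VM reference is looked up are placeholders for illustration, not values taken from this log.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util


def power_on_one_vm():
    # Session setup; host and credentials below are placeholders.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the SOAP request, which is what produces the
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=..." entries.
    retrieve_result = session.invoke_api(vim_util, 'get_objects',
                                         session.vim, 'VirtualMachine', 1)
    vm_ref = retrieve_result.objects[0].obj

    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task, yielding the "Task: {'id': ...,
    # 'name': PowerOnVM_Task} progress is N%" entries, and returns the task
    # info once it completes (or raises on failure).
    return session.wait_for_task(task)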
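The "Acquiring lock ... / Lock ... acquired by ... / Lock ... released" triplets throughout this section are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms in play follows; only the lock-name patterns ("compute_resources", "refresh_cache-<uuid>") are taken from the log, the function bodies are placeholders.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the "compute_resources" lock held; the decorator's wrapper is
    # what logs the acquire, "acquired by", and "released" lines together with
    # the wait and hold times.
    return {'instance_uuid': instance_uuid, 'claimed': True}


def refresh_cache(instance_uuid):
    # Context-manager form, matching the 'Acquiring lock "refresh_cache-<uuid>"'
    # and 'Releasing lock "refresh_cache-<uuid>"' entries.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here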
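The DuplicateName fault handled above ("Fault list: [DuplicateName]" followed by "Folder already exists: Project (...)") reflects an idempotent folder-create pattern: CreateFolder is attempted, and an existing folder of the same name is treated as success. A rough sketch, assuming oslo_vmware.exceptions exposes the DuplicateName fault class that the fault list names; the session and parent folder reference are placeholders.

from oslo_vmware import exceptions as vexc


def create_folder_idempotent(session, parent_folder_ref, name):
    # Issues the Folder.CreateFolder call seen in the log; an existing folder
    # with the same name raises a DuplicateName fault instead of succeeding.
    try:
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)
    except vexc.DuplicateName:
        # Matches "Folder already exists: ..."; the caller would then look up
        # the existing child folder under parent_folder_ref and reuse it.
        return None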
[ 1102.387484] env[62914]: DEBUG nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1102.387837] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1102.387837] env[62914]: value = "task-4832627" [ 1102.387837] env[62914]: _type = "Task" [ 1102.387837] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.389272] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a343a2e-c8bc-4f1c-9070-0c646e76ebdb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.400118] env[62914]: DEBUG oslo_concurrency.lockutils [req-72e7c5a8-6a35-465c-9f7a-f635767279ea req-f5e12cc6-ee03-40b3-9273-d416712cab66 service nova] Releasing lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.400621] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52780351-96d9-db8f-deef-72997b7dd7aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.413207] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832627, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.652948] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceed43d-af8c-4dea-9b43-d968e4f0083a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.661734] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc66222-ecb6-45e1-9bdc-98b67a3cdd01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.698550] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5d2414-7887-4205-94d8-bd6baf9c5e17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.707120] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bad2e6-4ab2-4759-9f78-727ec4103a59 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.724328] env[62914]: DEBUG nova.compute.provider_tree [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.736035] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832626, 'name': RelocateVM_Task} progress is 20%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.890508] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52780351-96d9-db8f-deef-72997b7dd7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.026205} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.892076] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-523e0866-663c-47c9-9a92-24f5dda0e118 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.900405] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1102.900405] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525a67da-0b3c-8573-8378-fdeba8393828" [ 1102.900405] env[62914]: _type = "Task" [ 1102.900405] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.904061] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832627, 'name': ReconfigVM_Task, 'duration_secs': 0.367071} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.907308] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 4648e825-359d-497f-99b4-cbc51b135860/4648e825-359d-497f-99b4-cbc51b135860.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.908474] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07b4915c-a807-413f-97fb-c9a08bb4c313 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.920686] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525a67da-0b3c-8573-8378-fdeba8393828, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.925140] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1102.925140] env[62914]: value = "task-4832628" [ 1102.925140] env[62914]: _type = "Task" [ 1102.925140] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.929457] env[62914]: INFO nova.compute.manager [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Took 22.91 seconds to build instance. [ 1102.936526] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832628, 'name': Rename_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.230814] env[62914]: DEBUG nova.scheduler.client.report [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1103.240196] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832626, 'name': RelocateVM_Task, 'duration_secs': 0.777008} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.240509] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Volume attach. Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1103.240740] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942060', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'name': 'volume-d801e165-dc65-4457-9762-f209bc342e87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19f21caa-7d96-4526-bb12-768c4fe4d23e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'serial': 'd801e165-dc65-4457-9762-f209bc342e87'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1103.241542] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c675defa-3726-4313-a806-dd5ae1b268bf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.258787] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2833ba27-8d99-43ca-8af6-46b30b536aba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.282649] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-d801e165-dc65-4457-9762-f209bc342e87/volume-d801e165-dc65-4457-9762-f209bc342e87.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.283893] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1103.286493] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d855bb77-cc7e-40d5-8228-c6302708f952 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.311621] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1103.311621] env[62914]: value = "task-4832629" [ 1103.311621] env[62914]: _type = "Task" [ 1103.311621] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.318647] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832629, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.329731] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.329954] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.330060] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.330286] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.330498] env[62914]: DEBUG nova.virt.hardware [None 
req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.330709] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1103.330985] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.331240] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.331500] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.331703] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.331904] env[62914]: DEBUG nova.virt.hardware [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.332892] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd69f11-57ef-417a-a9b6-7c2938145458 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.344574] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aaf0ed-20c5-4dff-ac51-77bf8f240f71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.383867] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.384141] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.384305] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.384490] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.384655] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.384787] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1103.385111] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.385331] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.385553] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.385766] env[62914]: DEBUG nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.385990] env[62914]: DEBUG 
nova.virt.hardware [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.387171] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d045d9dd-eef0-4ec6-bacb-1c1976782678 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.397065] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1714a8e5-3084-4e6a-bd83-777ff13eb0fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.412240] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:90:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c9a12d2-469f-4199-bfaa-f791d765deac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57890d0b-660c-4230-8104-4d1ae53eb7ce', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1103.419805] env[62914]: DEBUG oslo.service.loopingcall [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1103.423450] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1103.423787] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7d1df0b-2dcc-4288-bb62-67257c0f94e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.439116] env[62914]: DEBUG oslo_concurrency.lockutils [None req-89883712-de45-482a-be50-94b55d7f5af3 tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.445s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.447044] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525a67da-0b3c-8573-8378-fdeba8393828, 'name': SearchDatastore_Task, 'duration_secs': 0.027576} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.448899] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.449280] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1103.449573] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1103.449573] env[62914]: value = "task-4832630" [ 1103.449573] env[62914]: _type = "Task" [ 1103.449573] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.449842] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c69df81-25d1-47c5-8812-76491d39b395 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.458274] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832628, 'name': Rename_Task, 'duration_secs': 0.16226} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.459087] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1103.459741] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d63b8f4-0e5a-4423-a913-a106b8b9584e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.465774] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832630, 'name': CreateVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.468163] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1103.468163] env[62914]: value = "task-4832631" [ 1103.468163] env[62914]: _type = "Task" [ 1103.468163] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.468431] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1103.468431] env[62914]: value = "task-4832632" [ 1103.468431] env[62914]: _type = "Task" [ 1103.468431] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.486891] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.487289] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.735870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.736558] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1103.739947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.168s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.742509] env[62914]: INFO nova.compute.claims [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1103.826789] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832629, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.873611] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.873611] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.873919] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.874095] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.874229] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.878081] env[62914]: INFO nova.compute.manager [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Terminating instance [ 1103.881268] env[62914]: DEBUG nova.compute.manager [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1103.881514] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1103.883922] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec91869a-8695-471f-8b45-78ce60ccc16a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.903030] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1103.903030] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf6bfac6-5165-4610-b690-def499ef7da4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.911492] env[62914]: DEBUG oslo_vmware.api [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1103.911492] env[62914]: value = "task-4832633" [ 1103.911492] env[62914]: _type = "Task" [ 1103.911492] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.925176] env[62914]: DEBUG oslo_vmware.api [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.968360] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832630, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.986305] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832631, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.990516] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832632, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.250946] env[62914]: DEBUG nova.compute.utils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1104.257351] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1104.257351] env[62914]: DEBUG nova.network.neutron [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1104.321136] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832629, 'name': ReconfigVM_Task, 'duration_secs': 0.850584} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.321136] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-d801e165-dc65-4457-9762-f209bc342e87/volume-d801e165-dc65-4457-9762-f209bc342e87.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.327231] env[62914]: DEBUG nova.policy [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77fdc78360d04ca38a035d8a4802c27a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '046dc510102e43948f7d2649e6d58ee3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1104.328748] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d71c847-9532-4cf5-ac1d-50b36574074d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.347710] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1104.347710] env[62914]: value = "task-4832634" [ 1104.347710] env[62914]: _type = "Task" [ 1104.347710] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.362606] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832634, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.423117] env[62914]: DEBUG oslo_vmware.api [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832633, 'name': PowerOffVM_Task, 'duration_secs': 0.401099} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.423435] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1104.423615] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1104.423888] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a4e24fc-d6e8-4b42-b0d8-e90c8f731eb4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.464027] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832630, 'name': CreateVM_Task, 'duration_secs': 0.557253} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.464232] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1104.464988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.465199] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.466488] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1104.467071] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d8a668f-19c8-4d12-a86c-3f24928044cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.478748] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1104.478748] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d8a23e-3409-ba5f-d8f8-696a1f950d5b" [ 1104.478748] env[62914]: _type = "Task" [ 1104.478748] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.492569] env[62914]: DEBUG oslo_vmware.api [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832632, 'name': PowerOnVM_Task, 'duration_secs': 0.541921} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.492887] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751042} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.493637] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1104.493896] env[62914]: INFO nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Took 9.06 seconds to spawn the instance on the hypervisor. [ 1104.494114] env[62914]: DEBUG nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1104.494423] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1104.494631] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.496309] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba13e8e0-41f4-4b6f-97aa-c6f24aa1303c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.500062] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14464680-d64d-4899-a42d-3675e47fcdb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.510382] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1104.510382] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1104.510713] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 
tempest-InstanceActionsV221TestJSON-11607536-project-member] Deleting the datastore file [datastore2] c56b9ad3-8c89-44ee-8ee9-8e256bcad573 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.510831] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52d8a23e-3409-ba5f-d8f8-696a1f950d5b, 'name': SearchDatastore_Task, 'duration_secs': 0.02258} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.511890] env[62914]: DEBUG nova.compute.manager [req-aef52441-5c35-4993-9706-ab236cb5f0b7 req-60421cff-52d8-468f-bb99-2a18db7fc7da service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Received event network-vif-plugged-7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1104.512119] env[62914]: DEBUG oslo_concurrency.lockutils [req-aef52441-5c35-4993-9706-ab236cb5f0b7 req-60421cff-52d8-468f-bb99-2a18db7fc7da service nova] Acquiring lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.512333] env[62914]: DEBUG oslo_concurrency.lockutils [req-aef52441-5c35-4993-9706-ab236cb5f0b7 req-60421cff-52d8-468f-bb99-2a18db7fc7da service nova] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.512506] env[62914]: DEBUG oslo_concurrency.lockutils [req-aef52441-5c35-4993-9706-ab236cb5f0b7 req-60421cff-52d8-468f-bb99-2a18db7fc7da service nova] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.512680] env[62914]: DEBUG nova.compute.manager [req-aef52441-5c35-4993-9706-ab236cb5f0b7 req-60421cff-52d8-468f-bb99-2a18db7fc7da service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] No waiting events found dispatching network-vif-plugged-7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1104.512928] env[62914]: WARNING nova.compute.manager [req-aef52441-5c35-4993-9706-ab236cb5f0b7 req-60421cff-52d8-468f-bb99-2a18db7fc7da service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Received unexpected event network-vif-plugged-7cfd599e-0580-46fa-95e7-a1412897fede for instance with vm_state building and task_state spawning. 
[ 1104.514780] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a60819f9-b3a0-4e87-b061-4e5fb4c02308 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.517129] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.517538] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1104.517750] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.517888] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.518251] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.521958] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c70c2501-87c7-4d4f-827b-f880167c8ab7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.524947] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1104.524947] env[62914]: value = "task-4832636" [ 1104.524947] env[62914]: _type = "Task" [ 1104.524947] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.543023] env[62914]: DEBUG oslo_vmware.api [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for the task: (returnval){ [ 1104.543023] env[62914]: value = "task-4832637" [ 1104.543023] env[62914]: _type = "Task" [ 1104.543023] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.555082] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832636, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.555082] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.555082] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1104.556246] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d675d9e-5447-41f3-b9dd-ef8f295a4f4d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.562783] env[62914]: DEBUG oslo_vmware.api [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.567058] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1104.567058] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f860ba-a7b5-c285-c1a4-501a081ab99f" [ 1104.567058] env[62914]: _type = "Task" [ 1104.567058] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.578046] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f860ba-a7b5-c285-c1a4-501a081ab99f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.669875] env[62914]: DEBUG nova.network.neutron [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Successfully created port: 9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.671483] env[62914]: DEBUG nova.network.neutron [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Successfully updated port: 7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1104.758036] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1104.861741] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832634, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.053478] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093333} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.058966] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.059763] env[62914]: INFO nova.compute.manager [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Took 19.36 seconds to build instance. [ 1105.061465] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8a3427-dac2-4cb7-ad7f-5ac5c60f6cb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.082272] env[62914]: DEBUG oslo_vmware.api [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Task: {'id': task-4832637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170993} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.094014] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.094333] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1105.094536] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1105.094721] env[62914]: INFO nova.compute.manager [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1105.095112] env[62914]: DEBUG oslo.service.loopingcall [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.105138] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.112389] env[62914]: DEBUG nova.compute.manager [-] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1105.112575] env[62914]: DEBUG nova.network.neutron [-] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1105.114511] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9eaf9bd-3af5-4b51-b0aa-27218cb9e9d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.133996] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f860ba-a7b5-c285-c1a4-501a081ab99f, 'name': SearchDatastore_Task, 'duration_secs': 0.014852} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.136156] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b01832c7-9b5d-429f-aae3-f57f4936228c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.141910] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1105.141910] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233b71b-1743-e1d2-f73f-d6c01808f27f" [ 1105.141910] env[62914]: _type = "Task" [ 1105.141910] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.143843] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1105.143843] env[62914]: value = "task-4832638" [ 1105.143843] env[62914]: _type = "Task" [ 1105.143843] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.152312] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb86000d-33f5-4758-be7b-6c2ee5d47b98 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.160962] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233b71b-1743-e1d2-f73f-d6c01808f27f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.166258] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832638, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.167872] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fb89aa-c5e1-4057-a403-35b478c058ba {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.215800] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-03960f2e-3263-42f7-a7a4-7d7bcd23cf65" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.215996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-03960f2e-3263-42f7-a7a4-7d7bcd23cf65" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.216211] env[62914]: DEBUG nova.network.neutron [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1105.225029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1ca3d0-9bb4-46f9-8cef-b1b9e0157912 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.235083] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b888f8-7daf-4ae1-824b-269c249ad8fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.253919] env[62914]: DEBUG nova.compute.provider_tree [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.367107] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832634, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.575702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-77e29fe3-d822-41ca-b588-2c3e30773408 tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "4648e825-359d-497f-99b4-cbc51b135860" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.884s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.664149] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832638, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.664149] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5233b71b-1743-e1d2-f73f-d6c01808f27f, 'name': SearchDatastore_Task, 'duration_secs': 0.028484} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.664149] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.664149] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1105.664149] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-668eddbc-bde4-43ae-91cd-f14d5570fdd6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.675103] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1105.675103] env[62914]: value = "task-4832639" [ 1105.675103] env[62914]: _type = "Task" [ 1105.675103] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.689034] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832639, 'name': CopyVirtualDisk_Task} progress is 0%. 
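Datastore locations such as "[datastore1] devstack-image-cache_base/75c43660-.../75c43660-....vmdk" in the copy record above are plain strings in the log; oslo.vmware ships a small helper for building and parsing them. A sketch using the paths from that record:

    from oslo_vmware.objects.datastore import DatastorePath

    image_id = '75c43660-b52b-450e-ba36-0f721e14bc6c'
    instance = 'b285198b-aa95-4dcb-99b3-531d09c210d0'

    # Build a "[datastore] folder/file" path like the copy source above.
    src = DatastorePath('datastore1',
                        'devstack-image-cache_base/' + image_id,
                        image_id + '.vmdk')
    print(str(src))

    # Parse one back into its datastore name and relative path.
    dst = DatastorePath.parse('[datastore1] %s/%s.vmdk' % (instance, instance))
    print(dst.datastore, dst.rel_path)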
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.757267] env[62914]: DEBUG nova.scheduler.client.report [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1105.766395] env[62914]: DEBUG nova.network.neutron [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1105.771702] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1105.808601] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1105.808825] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1105.809201] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.809654] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 
tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1105.810015] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.810334] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1105.810794] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1105.811051] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1105.811320] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1105.811548] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1105.812640] env[62914]: DEBUG nova.virt.hardware [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1105.812868] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2621981f-2725-4a06-8464-fbc99bc71f6d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.823299] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef566aed-8ce8-42d6-894d-259fffe3614c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.862669] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832634, 'name': ReconfigVM_Task, 'duration_secs': 1.144428} completed 
successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.863074] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942060', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'name': 'volume-d801e165-dc65-4457-9762-f209bc342e87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19f21caa-7d96-4526-bb12-768c4fe4d23e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'serial': 'd801e165-dc65-4457-9762-f209bc342e87'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1105.864389] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1def4cef-4b36-4e35-a92e-0f554a37d82f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.871871] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1105.871871] env[62914]: value = "task-4832640" [ 1105.871871] env[62914]: _type = "Task" [ 1105.871871] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.882897] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832640, 'name': Rename_Task} progress is 0%. 
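The "Attached VMDK: {...}" record above logs the Cinder connection_info for the attached volume; the 'volume' value ("vm-942060") is the vCenter moref of the backing object Cinder reported. Reading the interesting fields back out is plain dictionary access; a sketch against a trimmed copy of that record:

    # connection_info as logged in the "Attached VMDK" record above,
    # trimmed to the fields used below.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-942060',
            'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87',
            'name': 'volume-d801e165-dc65-4457-9762-f209bc342e87',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': 'd801e165-dc65-4457-9762-f209bc342e87',
    }

    assert connection_info['driver_volume_type'] == 'vmdk'
    data = connection_info['data']
    print(data['volume_id'], data['volume'], data['access_mode'])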
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.109254] env[62914]: DEBUG nova.network.neutron [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Updating instance_info_cache with network_info: [{"id": "7cfd599e-0580-46fa-95e7-a1412897fede", "address": "fa:16:3e:74:15:3f", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cfd599e-05", "ovs_interfaceid": "7cfd599e-0580-46fa-95e7-a1412897fede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.163601] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832638, 'name': ReconfigVM_Task, 'duration_secs': 0.689789} completed successfully. 
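The instance_info_cache update above stores the Neutron VIF description as a network_info list. Pulling the commonly needed fields (port id, MAC, fixed IPs, MTU) out of that structure is straightforward; a sketch against a trimmed, plain-dict copy of the entry logged above:

    # Trimmed copy of the cached entry for port 7cfd599e-0580-46fa-95e7-a1412897fede.
    network_info = [{
        'id': '7cfd599e-0580-46fa-95e7-a1412897fede',
        'address': 'fa:16:3e:74:15:3f',
        'type': 'ovs',
        'devname': 'tap7cfd599e-05',
        'network': {
            'bridge': 'br-int',
            'subnets': [{
                'cidr': '192.168.128.0/28',
                'gateway': {'address': '192.168.128.1'},
                'ips': [{'address': '192.168.128.6', 'type': 'fixed'}],
            }],
            'meta': {'mtu': 8950},
        },
    }]

    for vif in network_info:
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']]
        print(vif['id'], vif['address'], fixed_ips, vif['network']['meta']['mtu'])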
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.163723] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 3b26b5d7-524a-41af-ab75-a158568e031e/3b26b5d7-524a-41af-ab75-a158568e031e.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.164862] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'device_type': 'disk', 'encrypted': False, 'encryption_format': None, 'boot_index': 0, 'guest_format': None, 'encryption_secret_uuid': None, 'disk_bus': None, 'encryption_options': None, 'size': 0, 'image_id': '75c43660-b52b-450e-ba36-0f721e14bc6c'}], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'mount_device': '/dev/sdb', 'attachment_id': '498e96f4-e3c6-47c3-94ae-1deb50650ed4', 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'}, 'delete_on_termination': False, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62914) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1106.165102] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1106.165339] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1106.166308] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4ae72c-c13b-4cac-82fc-1620a4838ea2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.187828] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d13906-b8d6-4e11-af19-3688eb344d99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.214562] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.220236] env[62914]: DEBUG nova.network.neutron [-] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.221546] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ac33261-137f-44bb-93ff-659d946f64ce {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.236080] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832639, 'name': CopyVirtualDisk_Task} progress is 77%. 
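The "Reconfiguring VM instance ... to attach disk ... with type thin" step above is a VirtualMachine.ReconfigVM_Task whose spec adds a single virtual disk device pointing at the volume's VMDK. A rough sketch of how such a spec can be assembled and submitted through oslo.vmware, assuming the `session` and `vm_ref` from the earlier sketch; the controller key, unit number, and sizes are illustrative, not values from this log:

    # Rough sketch only: build a one-device "add disk" reconfigure spec.
    cf = session.vim.client.factory

    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True
    backing.fileName = ('[datastore2] volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/'
                        'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk')

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = 1000   # key of an existing SCSI controller (illustrative)
    disk.unitNumber = 1
    disk.key = -100             # negative placeholder key for a new device
    disk.capacityInKB = 0       # real code passes the actual disk size

    dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'add'
    dev_change.device = disk

    config_spec = cf.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [dev_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)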
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.242291] env[62914]: DEBUG nova.compute.manager [req-73ec78ce-bde6-40b7-972d-f20597e60104 req-8b0f2bf3-771f-4c6c-ae42-0c3535397713 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Received event network-vif-plugged-9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1106.242586] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ec78ce-bde6-40b7-972d-f20597e60104 req-8b0f2bf3-771f-4c6c-ae42-0c3535397713 service nova] Acquiring lock "422f30c1-fc6a-4c82-9003-806a5959ee8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.242878] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ec78ce-bde6-40b7-972d-f20597e60104 req-8b0f2bf3-771f-4c6c-ae42-0c3535397713 service nova] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.243143] env[62914]: DEBUG oslo_concurrency.lockutils [req-73ec78ce-bde6-40b7-972d-f20597e60104 req-8b0f2bf3-771f-4c6c-ae42-0c3535397713 service nova] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.243369] env[62914]: DEBUG nova.compute.manager [req-73ec78ce-bde6-40b7-972d-f20597e60104 req-8b0f2bf3-771f-4c6c-ae42-0c3535397713 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] No waiting events found dispatching network-vif-plugged-9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1106.244067] env[62914]: WARNING nova.compute.manager [req-73ec78ce-bde6-40b7-972d-f20597e60104 req-8b0f2bf3-771f-4c6c-ae42-0c3535397713 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Received unexpected event network-vif-plugged-9493c2a8-67b9-476a-b101-494b5cab84b3 for instance with vm_state building and task_state spawning. [ 1106.246191] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1106.246191] env[62914]: value = "task-4832641" [ 1106.246191] env[62914]: _type = "Task" [ 1106.246191] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.258862] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832641, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.262952] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.263641] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1106.346155] env[62914]: DEBUG nova.network.neutron [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Successfully updated port: 9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1106.352195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "4648e825-359d-497f-99b4-cbc51b135860" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.352195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "4648e825-359d-497f-99b4-cbc51b135860" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.352195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "4648e825-359d-497f-99b4-cbc51b135860-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.352195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "4648e825-359d-497f-99b4-cbc51b135860-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.352653] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "4648e825-359d-497f-99b4-cbc51b135860-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.354696] env[62914]: INFO nova.compute.manager [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Terminating instance [ 1106.356657] env[62914]: DEBUG nova.compute.manager [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1106.356861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1106.357779] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c98bc78-d70f-43da-983d-bb2d12fa89b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.368745] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1106.369657] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8117c41-0a46-4046-98d5-f25a6bf72720 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.377530] env[62914]: DEBUG oslo_vmware.api [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1106.377530] env[62914]: value = "task-4832642" [ 1106.377530] env[62914]: _type = "Task" [ 1106.377530] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.384986] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832640, 'name': Rename_Task, 'duration_secs': 0.302568} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.385611] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1106.385909] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-725b78d9-fcd6-4f1e-8a6f-d9cb65c557c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.390374] env[62914]: DEBUG oslo_vmware.api [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832642, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.395980] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1106.395980] env[62914]: value = "task-4832643" [ 1106.395980] env[62914]: _type = "Task" [ 1106.395980] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.405135] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832643, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.540301] env[62914]: DEBUG nova.compute.manager [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Received event network-changed-7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1106.540440] env[62914]: DEBUG nova.compute.manager [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Refreshing instance network info cache due to event network-changed-7cfd599e-0580-46fa-95e7-a1412897fede. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1106.540683] env[62914]: DEBUG oslo_concurrency.lockutils [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] Acquiring lock "refresh_cache-03960f2e-3263-42f7-a7a4-7d7bcd23cf65" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.613384] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-03960f2e-3263-42f7-a7a4-7d7bcd23cf65" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.613384] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Instance network_info: |[{"id": "7cfd599e-0580-46fa-95e7-a1412897fede", "address": "fa:16:3e:74:15:3f", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cfd599e-05", "ovs_interfaceid": "7cfd599e-0580-46fa-95e7-a1412897fede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1106.613746] env[62914]: DEBUG oslo_concurrency.lockutils [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] Acquired lock "refresh_cache-03960f2e-3263-42f7-a7a4-7d7bcd23cf65" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.613971] env[62914]: DEBUG nova.network.neutron [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Refreshing network info cache for port 7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1106.615499] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:15:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cfd599e-0580-46fa-95e7-a1412897fede', 'vif_model': 'vmxnet3'}] 
{{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1106.623527] env[62914]: DEBUG oslo.service.loopingcall [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1106.624116] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1106.624393] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c72e65c-6d5c-4f53-9d50-2e95f264cd89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.646553] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1106.646553] env[62914]: value = "task-4832644" [ 1106.646553] env[62914]: _type = "Task" [ 1106.646553] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.657022] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832644, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.693482] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832639, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595361} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.693771] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1106.694029] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1106.694340] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24d7cb28-199b-497f-994f-4064a6f1c6c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.701379] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1106.701379] env[62914]: value = "task-4832645" [ 1106.701379] env[62914]: _type = "Task" [ 1106.701379] env[62914]: } to complete. 
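The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above is emitted by an oslo.service looping-call helper. The same wait-until-done pattern in its simplest form, sketched with FixedIntervalLoopingCall; the completion check here is a stand-in, not nova's:

    from oslo_service import loopingcall

    def make_poller(state):
        def _poll():
            state['tries'] += 1
            if state['tries'] >= 3:                 # stand-in completion check
                raise loopingcall.LoopingCallDone('created')
        return _poll

    state = {'tries': 0}
    timer = loopingcall.FixedIntervalLoopingCall(make_poller(state))
    # start() returns an event; wait() blocks until LoopingCallDone is raised
    # inside the polled function and then yields its retvalue.
    result = timer.start(interval=0.1).wait()
    print(result)   # 'created'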
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.713815] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832645, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.737270] env[62914]: INFO nova.compute.manager [-] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Took 1.62 seconds to deallocate network for instance. [ 1106.757616] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832641, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.774513] env[62914]: DEBUG nova.compute.utils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1106.776876] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1106.776876] env[62914]: DEBUG nova.network.neutron [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1106.850662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "refresh_cache-422f30c1-fc6a-4c82-9003-806a5959ee8d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.850855] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquired lock "refresh_cache-422f30c1-fc6a-4c82-9003-806a5959ee8d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.851026] env[62914]: DEBUG nova.network.neutron [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1106.874203] env[62914]: DEBUG nova.policy [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddc9958565c745e488dc7f3b34af9585', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': '4860bec4a28e4289b7a508f007fff452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1106.889095] env[62914]: DEBUG oslo_vmware.api [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832642, 'name': PowerOffVM_Task, 'duration_secs': 0.21535} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.889671] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1106.889671] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1106.889874] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25bed03c-20eb-42c4-841f-66f4592688d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.907835] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832643, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.044104] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1107.044355] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1107.044611] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleting the datastore file [datastore2] 4648e825-359d-497f-99b4-cbc51b135860 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.044931] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c41bc9f-428a-4bec-a60f-ba8ab9f8dbb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.053272] env[62914]: DEBUG oslo_vmware.api [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for the task: (returnval){ [ 1107.053272] env[62914]: value = "task-4832647" [ 1107.053272] env[62914]: _type = "Task" [ 1107.053272] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.065683] env[62914]: DEBUG oslo_vmware.api [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.161475] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832644, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.212141] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832645, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091577} completed successfully. 
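The records above show the VMware-side teardown order for instance 4648e825-359d-497f-99b4-cbc51b135860: power off (PowerOffVM_Task), unregister the VM (UnregisterVM, a plain method rather than a *_Task, exactly as logged), then delete its datastore directory (FileManager.DeleteDatastoreFile_Task). A rough sketch of that sequence through oslo.vmware, reusing the `session` and `vm_ref` from the earlier sketch; error handling is omitted:

    from oslo_vmware import vim_util

    # Look up a Datacenter reference for the datastore-relative delete below.
    dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 1).objects[0].obj

    # 1. Power off the VM and wait for the task to finish.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Unregister it from the inventory (synchronous call, no task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Remove the instance directory to free the datastore space.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore2] 4648e825-359d-497f-99b4-cbc51b135860',
                              datacenter=dc_ref)
    session.wait_for_task(task)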
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.213062] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1107.213422] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79d2fac-b68c-4f4f-970c-83f693ba996e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.217203] env[62914]: DEBUG nova.network.neutron [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Successfully created port: 629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.241618] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1107.241998] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-163ca6f5-b9d6-4b65-82c4-15a01cd1e162 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.257697] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.258781] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.258781] env[62914]: DEBUG nova.objects.instance [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lazy-loading 'resources' on Instance uuid c56b9ad3-8c89-44ee-8ee9-8e256bcad573 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.272357] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832641, 'name': ReconfigVM_Task, 'duration_secs': 0.557379} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.273933] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.278925] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1107.278925] env[62914]: value = "task-4832648" [ 1107.278925] env[62914]: _type = "Task" [ 1107.278925] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.279689] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-834e935e-79dc-4ffb-9baa-f470c8eeda99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.290610] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1107.309078] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.310022] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1107.310022] env[62914]: value = "task-4832649" [ 1107.310022] env[62914]: _type = "Task" [ 1107.310022] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.319277] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832649, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.408227] env[62914]: DEBUG oslo_vmware.api [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832643, 'name': PowerOnVM_Task, 'duration_secs': 0.69142} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.408586] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1107.408822] env[62914]: INFO nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Took 6.80 seconds to spawn the instance on the hypervisor. [ 1107.409030] env[62914]: DEBUG nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1107.409875] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba01018-133c-4842-aa14-49b5d0ec1a17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.473199] env[62914]: DEBUG nova.network.neutron [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1107.564514] env[62914]: DEBUG oslo_vmware.api [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Task: {'id': task-4832647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351974} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.564514] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.564514] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1107.564514] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1107.564915] env[62914]: INFO nova.compute.manager [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1107.564915] env[62914]: DEBUG oslo.service.loopingcall [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.565434] env[62914]: DEBUG nova.compute.manager [-] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1107.565434] env[62914]: DEBUG nova.network.neutron [-] [instance: 4648e825-359d-497f-99b4-cbc51b135860] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1107.666240] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832644, 'name': CreateVM_Task, 'duration_secs': 0.57674} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.666714] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1107.667419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.667979] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.667979] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1107.668261] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09942a20-6ebd-4549-b7cd-000b1142dcf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.675148] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1107.675148] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf40b5-3da8-a891-3fed-a9d4bc59bdf3" [ 1107.675148] env[62914]: _type = "Task" [ 1107.675148] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.688164] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf40b5-3da8-a891-3fed-a9d4bc59bdf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.689796] env[62914]: DEBUG nova.network.neutron [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Updated VIF entry in instance network info cache for port 7cfd599e-0580-46fa-95e7-a1412897fede. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1107.689796] env[62914]: DEBUG nova.network.neutron [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Updating instance_info_cache with network_info: [{"id": "7cfd599e-0580-46fa-95e7-a1412897fede", "address": "fa:16:3e:74:15:3f", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cfd599e-05", "ovs_interfaceid": "7cfd599e-0580-46fa-95e7-a1412897fede", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.786391] env[62914]: DEBUG nova.network.neutron [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Updating instance_info_cache with network_info: [{"id": "9493c2a8-67b9-476a-b101-494b5cab84b3", "address": "fa:16:3e:42:d4:eb", "network": {"id": "61d3c9d2-b39e-4b59-ac91-38df81cc3052", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-756595598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "046dc510102e43948f7d2649e6d58ee3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9493c2a8-67", "ovs_interfaceid": "9493c2a8-67b9-476a-b101-494b5cab84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.813996] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832648, 'name': ReconfigVM_Task, 'duration_secs': 0.393898} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.818974] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Reconfigured VM instance instance-00000064 to attach disk [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0/b285198b-aa95-4dcb-99b3-531d09c210d0.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.822480] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-164d4274-1753-421d-85b9-94b6b04a1912 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.834408] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832649, 'name': ReconfigVM_Task, 'duration_secs': 0.200023} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.836078] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1107.836783] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1107.836783] env[62914]: value = "task-4832650" [ 1107.836783] env[62914]: _type = "Task" [ 1107.836783] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.837024] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f913773-2a6f-4631-b4bd-1a13e0742f4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.857121] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832650, 'name': Rename_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.857121] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1107.857121] env[62914]: value = "task-4832651" [ 1107.857121] env[62914]: _type = "Task" [ 1107.857121] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.936414] env[62914]: INFO nova.compute.manager [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Took 13.67 seconds to build instance. [ 1108.132202] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b7e1cb-7b93-41e0-842e-f1c538d44b3a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.143733] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef57548-9255-4a04-b67b-2e26f6375794 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.182990] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3113d280-198b-4151-ab33-5298dafb6fa9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.193972] env[62914]: DEBUG oslo_concurrency.lockutils [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] Releasing lock "refresh_cache-03960f2e-3263-42f7-a7a4-7d7bcd23cf65" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.194355] env[62914]: DEBUG nova.compute.manager [req-6e4e4884-f356-47ec-bec7-086c369e7dba req-20dc36e1-71f5-460d-9890-d568cb12bcd0 service nova] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Received event network-vif-deleted-926aebc2-7c6b-4107-a2a5-e496438a84db {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1108.194809] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf40b5-3da8-a891-3fed-a9d4bc59bdf3, 'name': SearchDatastore_Task, 'duration_secs': 0.033098} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.196080] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be853d7-68bf-49ca-8b0b-42c5f0d76594 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.200063] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.200353] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.200816] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.200816] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.201057] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.201362] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b620d48d-f451-46ed-bec8-a714f95420b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.214780] env[62914]: DEBUG nova.compute.provider_tree [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.217459] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.217719] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1108.218694] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da13b6a9-03d3-4af6-a25e-4b60cf67d6a3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.225904] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1108.225904] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bdb49a-2b46-444e-f822-e5546cbb559b" [ 1108.225904] env[62914]: _type = "Task" [ 1108.225904] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.235621] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bdb49a-2b46-444e-f822-e5546cbb559b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.294215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Releasing lock "refresh_cache-422f30c1-fc6a-4c82-9003-806a5959ee8d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.294509] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Instance network_info: |[{"id": "9493c2a8-67b9-476a-b101-494b5cab84b3", "address": "fa:16:3e:42:d4:eb", "network": {"id": "61d3c9d2-b39e-4b59-ac91-38df81cc3052", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-756595598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "046dc510102e43948f7d2649e6d58ee3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9493c2a8-67", "ovs_interfaceid": "9493c2a8-67b9-476a-b101-494b5cab84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1108.295018] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 
422f30c1-fc6a-4c82-9003-806a5959ee8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:d4:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0721b358-3768-472d-95f8-6d6755ab1635', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9493c2a8-67b9-476a-b101-494b5cab84b3', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.303568] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Creating folder: Project (046dc510102e43948f7d2649e6d58ee3). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1108.304072] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb3691cc-841a-4b70-b39d-dd64c4c446c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.308224] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1108.322572] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Created folder: Project (046dc510102e43948f7d2649e6d58ee3) in parent group-v941773. [ 1108.322878] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Creating folder: Instances. Parent ref: group-v942073. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1108.323168] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-244d2f36-ca17-400d-8fe3-3379e012161b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.335907] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Created folder: Instances in parent group-v942073. [ 1108.335907] env[62914]: DEBUG oslo.service.loopingcall [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1108.338261] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1108.338580] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1108.338794] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.339072] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1108.339315] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.339552] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1108.339844] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1108.340074] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1108.340304] env[62914]: DEBUG nova.virt.hardware [None 
req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1108.340521] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1108.340745] env[62914]: DEBUG nova.virt.hardware [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1108.341140] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1108.342067] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa6f99c-be1d-4e8a-8b7b-c085e628ddfa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.345238] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2d6c4a0-f242-463c-bc79-b6d1673388a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.383227] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74fc892-926a-41fa-a19e-c0d8587b0688 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.391031] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832650, 'name': Rename_Task, 'duration_secs': 0.18958} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.391469] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.391469] env[62914]: value = "task-4832654" [ 1108.391469] env[62914]: _type = "Task" [ 1108.391469] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.397253] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1108.397978] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832651, 'name': Rename_Task, 'duration_secs': 0.212567} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.398975] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d9f4798-6801-4098-a3d9-cde1939f03fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.406935] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1108.424013] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a649a28d-8d8a-4fe4-8782-5eb08bab79a3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.435268] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832654, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.438361] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1108.438361] env[62914]: value = "task-4832656" [ 1108.438361] env[62914]: _type = "Task" [ 1108.438361] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.438690] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1108.438690] env[62914]: value = "task-4832655" [ 1108.438690] env[62914]: _type = "Task" [ 1108.438690] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.439210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d3146bdb-305d-47a7-864a-e5a0c1bde1b5 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.184s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.454031] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832656, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.456025] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832655, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.479674] env[62914]: DEBUG nova.network.neutron [-] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.484506] env[62914]: DEBUG nova.compute.manager [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Received event network-changed-9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1108.484506] env[62914]: DEBUG nova.compute.manager [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Refreshing instance network info cache due to event network-changed-9493c2a8-67b9-476a-b101-494b5cab84b3. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1108.484506] env[62914]: DEBUG oslo_concurrency.lockutils [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] Acquiring lock "refresh_cache-422f30c1-fc6a-4c82-9003-806a5959ee8d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.484618] env[62914]: DEBUG oslo_concurrency.lockutils [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] Acquired lock "refresh_cache-422f30c1-fc6a-4c82-9003-806a5959ee8d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.484807] env[62914]: DEBUG nova.network.neutron [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Refreshing network info cache for port 9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1108.578407] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.578407] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.578407] env[62914]: INFO nova.compute.manager [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Shelving [ 1108.720138] env[62914]: DEBUG nova.scheduler.client.report [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Inventory has not changed for 
provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1108.741383] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bdb49a-2b46-444e-f822-e5546cbb559b, 'name': SearchDatastore_Task, 'duration_secs': 0.041208} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.743400] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93dd22ab-33ee-413f-a3e6-33a43c738797 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.750730] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1108.750730] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52efc664-6079-2a43-ff48-dbc5df909711" [ 1108.750730] env[62914]: _type = "Task" [ 1108.750730] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.762149] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52efc664-6079-2a43-ff48-dbc5df909711, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.910021] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832654, 'name': CreateVM_Task, 'duration_secs': 0.522327} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.911801] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1108.919033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.919033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.919033] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1108.919033] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7647a124-7c73-4e87-9ba0-2f62ab133506 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.927422] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1108.927422] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52679cf6-875b-b23b-38e2-c1caa3eded0b" [ 1108.927422] env[62914]: _type = "Task" [ 1108.927422] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.937455] env[62914]: DEBUG nova.network.neutron [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Successfully updated port: 629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1108.960642] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52679cf6-875b-b23b-38e2-c1caa3eded0b, 'name': SearchDatastore_Task, 'duration_secs': 0.014576} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.962961] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.962961] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.963736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.968689] env[62914]: DEBUG oslo_vmware.api [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832656, 'name': PowerOnVM_Task, 'duration_secs': 0.523807} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.972665] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1108.973100] env[62914]: DEBUG nova.compute.manager [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1108.973593] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832655, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.974965] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0d7c7c-643f-4009-925a-c8a24ba8ea7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.988707] env[62914]: INFO nova.compute.manager [-] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Took 1.42 seconds to deallocate network for instance. 
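The records around this point illustrate oslo.vmware's task-polling pattern: the driver invokes a vSphere task (CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task, ...) and then blocks in wait_for_task, which polls the task object and emits the "Waiting for the task ... to complete" and "progress is N%" lines seen above. A minimal sketch of that pattern with oslo.vmware follows; it assumes an already-established VMwareAPISession and a VirtualMachine managed-object reference, and the helper name power_on_and_wait is purely illustrative, not anything taken from Nova's code.

```python
from oslo_vmware import api as vmware_api


def power_on_and_wait(session: vmware_api.VMwareAPISession, vm_ref):
    """Start a VM and block until vCenter reports the task as complete.

    invoke_api() sends the PowerOnVM_Task request through the session's
    vim client; wait_for_task() then polls the returned task reference and
    logs its progress, which is what produces the repeated
    "progress is N%" records in this log.
    """
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task_ref)
```

Nova's VMware driver reaches the same machinery through its vm_util helpers (power_on_instance and friends), as the surrounding nova.virt.vmwareapi.vm_util records show.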
[ 1109.046397] env[62914]: DEBUG nova.compute.manager [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Received event network-changed-3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1109.046601] env[62914]: DEBUG nova.compute.manager [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Refreshing instance network info cache due to event network-changed-3724af6c-0dc9-4056-9f6c-d5d8f85f195e. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1109.046860] env[62914]: DEBUG oslo_concurrency.lockutils [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] Acquiring lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.046981] env[62914]: DEBUG oslo_concurrency.lockutils [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] Acquired lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.047407] env[62914]: DEBUG nova.network.neutron [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Refreshing network info cache for port 3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1109.090350] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1109.090623] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be3ce017-7328-42a6-bd2f-40143e6de14e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.098102] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1109.098102] env[62914]: value = "task-4832657" [ 1109.098102] env[62914]: _type = "Task" [ 1109.098102] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.108546] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832657, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.226245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.237216] env[62914]: DEBUG nova.network.neutron [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Updated VIF entry in instance network info cache for port 9493c2a8-67b9-476a-b101-494b5cab84b3. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1109.237610] env[62914]: DEBUG nova.network.neutron [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Updating instance_info_cache with network_info: [{"id": "9493c2a8-67b9-476a-b101-494b5cab84b3", "address": "fa:16:3e:42:d4:eb", "network": {"id": "61d3c9d2-b39e-4b59-ac91-38df81cc3052", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-756595598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "046dc510102e43948f7d2649e6d58ee3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9493c2a8-67", "ovs_interfaceid": "9493c2a8-67b9-476a-b101-494b5cab84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.257291] env[62914]: INFO nova.scheduler.client.report [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Deleted allocations for instance c56b9ad3-8c89-44ee-8ee9-8e256bcad573 [ 1109.273383] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52efc664-6079-2a43-ff48-dbc5df909711, 'name': SearchDatastore_Task, 'duration_secs': 0.014391} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.273383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.273383] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 03960f2e-3263-42f7-a7a4-7d7bcd23cf65/03960f2e-3263-42f7-a7a4-7d7bcd23cf65.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1109.273383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.273916] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.273916] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41b5690d-19c5-42b3-a604-16bac285a8a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.277893] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b15206ef-049d-406d-a92d-5218ec4c5dd6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.287400] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1109.287400] env[62914]: value = "task-4832658" [ 1109.287400] env[62914]: _type = "Task" [ 1109.287400] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.293026] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.293026] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1109.293289] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6915290f-e42f-49ba-ba43-c6bfc50419da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.301636] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832658, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.307242] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1109.307242] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5216a35a-c4a2-b2cd-8649-70aba1008b5e" [ 1109.307242] env[62914]: _type = "Task" [ 1109.307242] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.318775] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5216a35a-c4a2-b2cd-8649-70aba1008b5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.420101] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.420501] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.420845] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.421170] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.421435] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.424516] env[62914]: INFO nova.compute.manager [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Terminating instance [ 1109.427246] env[62914]: DEBUG nova.compute.manager [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1109.427540] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1109.428858] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d4e00b-b0fd-4564-92d1-97763f6ef936 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.439550] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1109.439951] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-782254e6-f255-41d0-8cb0-980651bedecb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.449536] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-58be0d35-9392-47ad-b87c-a1b66cdc3623" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.449536] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-58be0d35-9392-47ad-b87c-a1b66cdc3623" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.449536] env[62914]: DEBUG nova.network.neutron [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Building network info cache for instance {{(pid=62914) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1109.451142] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1109.451142] env[62914]: value = "task-4832659" [ 1109.451142] env[62914]: _type = "Task" [ 1109.451142] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.465204] env[62914]: DEBUG oslo_vmware.api [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832655, 'name': PowerOnVM_Task, 'duration_secs': 0.879132} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.469251] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1109.469606] env[62914]: DEBUG nova.compute.manager [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1109.470377] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832659, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.471776] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61cda15-ad4e-4441-8aaa-68fdffff52b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.495369] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.495740] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.496129] env[62914]: DEBUG nova.objects.instance [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lazy-loading 'resources' on Instance uuid 4648e825-359d-497f-99b4-cbc51b135860 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.497734] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.609970] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832657, 'name': PowerOffVM_Task, 'duration_secs': 0.509573} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.610412] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1109.611523] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167bf381-021a-4180-b4ab-2adbfe34555a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.635620] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c020b629-e515-43d6-9b6d-d8ad6a696102 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.741072] env[62914]: DEBUG oslo_concurrency.lockutils [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] Releasing lock "refresh_cache-422f30c1-fc6a-4c82-9003-806a5959ee8d" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.741380] env[62914]: DEBUG nova.compute.manager [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Received event network-vif-deleted-e597807d-27f1-4d23-b472-414faeba65a8 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1109.741593] env[62914]: INFO nova.compute.manager [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Neutron deleted interface e597807d-27f1-4d23-b472-414faeba65a8; detaching it from the instance and deleting it from the info cache [ 1109.741828] env[62914]: DEBUG nova.network.neutron [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.770419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2d88ff48-908f-4af0-84e7-e010acc03efc tempest-InstanceActionsV221TestJSON-11607536 tempest-InstanceActionsV221TestJSON-11607536-project-member] Lock "c56b9ad3-8c89-44ee-8ee9-8e256bcad573" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.897s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.803666] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832658, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.819483] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5216a35a-c4a2-b2cd-8649-70aba1008b5e, 'name': SearchDatastore_Task, 'duration_secs': 0.032036} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.820410] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9c095ba-1a83-47fe-88ab-5c60f230a513 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.828853] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1109.828853] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5280cef8-c089-a0ec-dd5c-ec32a1bb13fd" [ 1109.828853] env[62914]: _type = "Task" [ 1109.828853] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.839818] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5280cef8-c089-a0ec-dd5c-ec32a1bb13fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.936600] env[62914]: DEBUG nova.network.neutron [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updated VIF entry in instance network info cache for port 3724af6c-0dc9-4056-9f6c-d5d8f85f195e. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1109.937106] env[62914]: DEBUG nova.network.neutron [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updating instance_info_cache with network_info: [{"id": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "address": "fa:16:3e:74:74:3d", "network": {"id": "f0aa6b1f-217f-4abb-be3b-40cf13b8ea6d", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-55159045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceffd38633104c58bbdc3176b7489c92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3724af6c-0d", "ovs_interfaceid": "3724af6c-0dc9-4056-9f6c-d5d8f85f195e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.974808] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832659, 'name': PowerOffVM_Task, 'duration_secs': 0.29195} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.975118] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1109.975302] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1109.975564] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac40deb8-7cb6-455b-b330-4f8e93d97578 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.993736] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.018102] env[62914]: DEBUG nova.network.neutron [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1110.048416] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1110.048719] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1110.048955] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleting the datastore file [datastore1] 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.049295] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06fbe055-bc9e-42a5-ac29-4c46660d7f74 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.057458] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for the task: (returnval){ [ 1110.057458] env[62914]: value = "task-4832661" [ 1110.057458] env[62914]: _type = "Task" [ 1110.057458] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.069655] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.153711] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1110.154107] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-13dc6ec7-0410-4300-9106-9ff5730e138b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.162538] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1110.162538] env[62914]: value = "task-4832662" [ 1110.162538] env[62914]: _type = "Task" [ 1110.162538] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.175592] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832662, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.198629] env[62914]: DEBUG nova.network.neutron [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Updating instance_info_cache with network_info: [{"id": "629efcb6-6e27-4b06-99d0-469216825a32", "address": "fa:16:3e:d3:93:a2", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629efcb6-6e", "ovs_interfaceid": "629efcb6-6e27-4b06-99d0-469216825a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.246426] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-582a2b7b-90de-4448-986a-cdb7a9393c6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.257698] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1804bc6-ec70-4cb9-8ff2-e388fba03f20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.309857] env[62914]: DEBUG nova.compute.manager [req-8c48bf0b-b595-4df9-80a4-21ee45920fee req-dd88e7af-d4c5-4438-b21a-ff18b2e9d853 service nova] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Detach interface failed, port_id=e597807d-27f1-4d23-b472-414faeba65a8, reason: Instance 4648e825-359d-497f-99b4-cbc51b135860 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1110.312025] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcee2b4-40c9-4f43-8e3e-3cf38a38504a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.320962] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64949} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.323212] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 03960f2e-3263-42f7-a7a4-7d7bcd23cf65/03960f2e-3263-42f7-a7a4-7d7bcd23cf65.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1110.323671] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1110.323831] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-750cbb34-efbe-4d7f-ade6-285c934c9e58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.326991] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2300523e-42d7-4166-a744-3d2618ddf098 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.337019] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1110.337019] env[62914]: value = "task-4832663" [ 1110.337019] env[62914]: _type = "Task" [ 1110.337019] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.370072] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5280cef8-c089-a0ec-dd5c-ec32a1bb13fd, 'name': SearchDatastore_Task, 'duration_secs': 0.056947} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.371406] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.371712] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 422f30c1-fc6a-4c82-9003-806a5959ee8d/422f30c1-fc6a-4c82-9003-806a5959ee8d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1110.372552] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8585f97f-0dff-44b7-bb82-a4045d6ae0a1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.378472] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef6a5812-55b7-4c3b-b319-49740cba8e1e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.381054] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832663, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.387878] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379035d8-a69c-4e1f-bdc3-2990f34fadb3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.393417] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1110.393417] env[62914]: value = "task-4832664" [ 1110.393417] env[62914]: _type = "Task" [ 1110.393417] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.407926] env[62914]: DEBUG nova.compute.provider_tree [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.415710] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832664, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.441974] env[62914]: DEBUG oslo_concurrency.lockutils [req-850468e4-52ef-4057-9191-e60936735238 req-6e978e05-d328-4766-8922-26304191d0ca service nova] Releasing lock "refresh_cache-19f21caa-7d96-4526-bb12-768c4fe4d23e" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.521193] env[62914]: DEBUG nova.compute.manager [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Received event network-vif-plugged-629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1110.521373] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] Acquiring lock "58be0d35-9392-47ad-b87c-a1b66cdc3623-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.521997] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.521997] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.521997] env[62914]: DEBUG nova.compute.manager [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] No waiting events found dispatching network-vif-plugged-629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1110.522180] env[62914]: WARNING nova.compute.manager [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Received unexpected event network-vif-plugged-629efcb6-6e27-4b06-99d0-469216825a32 for instance with vm_state building and task_state spawning. [ 1110.522335] env[62914]: DEBUG nova.compute.manager [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Received event network-changed-629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1110.522507] env[62914]: DEBUG nova.compute.manager [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Refreshing instance network info cache due to event network-changed-629efcb6-6e27-4b06-99d0-469216825a32. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1110.522689] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] Acquiring lock "refresh_cache-58be0d35-9392-47ad-b87c-a1b66cdc3623" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.571377] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.676507] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832662, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.701657] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-58be0d35-9392-47ad-b87c-a1b66cdc3623" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.704166] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Instance network_info: |[{"id": "629efcb6-6e27-4b06-99d0-469216825a32", "address": "fa:16:3e:d3:93:a2", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629efcb6-6e", "ovs_interfaceid": "629efcb6-6e27-4b06-99d0-469216825a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1110.704166] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] Acquired lock "refresh_cache-58be0d35-9392-47ad-b87c-a1b66cdc3623" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.704166] env[62914]: DEBUG nova.network.neutron [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service 
nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Refreshing network info cache for port 629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1110.704638] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:93:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '629efcb6-6e27-4b06-99d0-469216825a32', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.712691] env[62914]: DEBUG oslo.service.loopingcall [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1110.713943] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1110.714285] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3563a7d-cb7d-4018-948e-8874ca04a8eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.737271] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.737271] env[62914]: value = "task-4832665" [ 1110.737271] env[62914]: _type = "Task" [ 1110.737271] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.748993] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832665, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.849096] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832663, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155817} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.849408] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1110.850235] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926a289f-f4f1-4d6f-a159-45ed668603f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.874242] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 03960f2e-3263-42f7-a7a4-7d7bcd23cf65/03960f2e-3263-42f7-a7a4-7d7bcd23cf65.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1110.874242] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0e3bcfc-f341-4d9e-955c-9d9ccb78aeeb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.895058] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1110.895058] env[62914]: value = "task-4832666" [ 1110.895058] env[62914]: _type = "Task" [ 1110.895058] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.907119] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832664, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.911037] env[62914]: DEBUG nova.scheduler.client.report [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1110.914597] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832666, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.072073] env[62914]: DEBUG oslo_vmware.api [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Task: {'id': task-4832661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.663801} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.074036] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1111.074036] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1111.074036] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1111.074036] env[62914]: INFO nova.compute.manager [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1111.074036] env[62914]: DEBUG oslo.service.loopingcall [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1111.074036] env[62914]: DEBUG nova.compute.manager [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1111.074036] env[62914]: DEBUG nova.network.neutron [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1111.180435] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832662, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.256888] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832665, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.419872] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.924s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.422629] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832664, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.428012] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.930s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.428449] env[62914]: DEBUG nova.objects.instance [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1111.432385] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832666, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.477058] env[62914]: INFO nova.scheduler.client.report [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Deleted allocations for instance 4648e825-359d-497f-99b4-cbc51b135860 [ 1111.683163] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832662, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.719702] env[62914]: DEBUG nova.network.neutron [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Updated VIF entry in instance network info cache for port 629efcb6-6e27-4b06-99d0-469216825a32. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1111.720104] env[62914]: DEBUG nova.network.neutron [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Updating instance_info_cache with network_info: [{"id": "629efcb6-6e27-4b06-99d0-469216825a32", "address": "fa:16:3e:d3:93:a2", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629efcb6-6e", "ovs_interfaceid": "629efcb6-6e27-4b06-99d0-469216825a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.753851] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832665, 'name': CreateVM_Task, 'duration_secs': 0.649136} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.754209] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1111.756123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.756123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.756123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1111.756589] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20e24c70-d177-4643-8b97-e5e93c8bfa0d {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.763791] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1111.763791] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529b21ed-5705-c004-ec5e-69cfab9e65f3" [ 1111.763791] env[62914]: _type = "Task" [ 1111.763791] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.780434] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529b21ed-5705-c004-ec5e-69cfab9e65f3, 'name': SearchDatastore_Task, 'duration_secs': 0.011947} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.783312] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.783312] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1111.783312] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.783312] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.783312] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.783312] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4451a08a-650e-4eda-b6b8-470dc9824486 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.795806] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 
tempest-DeleteServersTestJSON-844183300-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.795806] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1111.795806] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c5e276-cf85-48eb-b5f4-fed290dcf470 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.802228] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1111.802228] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526e639b-5812-43e2-1ba5-398625f5a4c0" [ 1111.802228] env[62914]: _type = "Task" [ 1111.802228] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.814358] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526e639b-5812-43e2-1ba5-398625f5a4c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.911223] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832664, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.127899} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.914681] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 422f30c1-fc6a-4c82-9003-806a5959ee8d/422f30c1-fc6a-4c82-9003-806a5959ee8d.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1111.914957] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.915293] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832666, 'name': ReconfigVM_Task, 'duration_secs': 0.777856} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.915506] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a50d794-e03d-4bd7-9c05-552159ad451f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.918237] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 03960f2e-3263-42f7-a7a4-7d7bcd23cf65/03960f2e-3263-42f7-a7a4-7d7bcd23cf65.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1111.918911] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd7c09f6-7cf9-4a60-9f30-9e01df626d33 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.926824] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1111.926824] env[62914]: value = "task-4832667" [ 1111.926824] env[62914]: _type = "Task" [ 1111.926824] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.928805] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1111.928805] env[62914]: value = "task-4832668" [ 1111.928805] env[62914]: _type = "Task" [ 1111.928805] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.943771] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832667, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.948273] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832668, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.983355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f56be800-03ab-4595-9134-938a85a79b0b tempest-ImagesOneServerNegativeTestJSON-373251561 tempest-ImagesOneServerNegativeTestJSON-373251561-project-member] Lock "4648e825-359d-497f-99b4-cbc51b135860" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.631s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.180576] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832662, 'name': CreateSnapshot_Task, 'duration_secs': 1.550354} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.180964] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1112.181762] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76906c7-6bd8-4238-b6fa-71957a4dbb17 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.186058] env[62914]: DEBUG nova.network.neutron [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.224656] env[62914]: DEBUG oslo_concurrency.lockutils [req-6bfa09d6-27ff-4e38-bd82-119f2814b295 req-339562dd-79f5-409e-b49b-fb9412580f6a service nova] Releasing lock "refresh_cache-58be0d35-9392-47ad-b87c-a1b66cdc3623" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.319674] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526e639b-5812-43e2-1ba5-398625f5a4c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011696} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.321015] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55f48958-f913-48b9-bf7b-9f8eacf28aa8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.330834] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1112.330834] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259711e-480f-a1d5-e562-5ac6e9a0b854" [ 1112.330834] env[62914]: _type = "Task" [ 1112.330834] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.347828] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259711e-480f-a1d5-e562-5ac6e9a0b854, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.442884] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832667, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.447256] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56533f66-c657-4259-ab0d-955cc03e3266 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.448417] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.227091} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.449207] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.455s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.449207] env[62914]: DEBUG nova.objects.instance [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62914) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1112.451586] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.452494] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cc6ff5-4c81-4ee7-ab05-567ce8fcc4e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.477870] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Reconfiguring VM instance 
instance-00000070 to attach disk [datastore1] 422f30c1-fc6a-4c82-9003-806a5959ee8d/422f30c1-fc6a-4c82-9003-806a5959ee8d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.479706] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c81ff2b-d8b8-4c19-9148-4a613075d958 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.505035] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1112.505035] env[62914]: value = "task-4832669" [ 1112.505035] env[62914]: _type = "Task" [ 1112.505035] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.512365] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.556799] env[62914]: DEBUG nova.compute.manager [req-6234cbcb-6e81-4b9d-9078-d3ddc531ef32 req-e2452b66-9dce-481a-8dd2-274b76020f3a service nova] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Received event network-vif-deleted-00706251-f634-4dcb-9705-105152de241f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1112.567868] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.568707] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.568868] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Cleaning up deleted instances {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11304}} [ 1112.688850] env[62914]: INFO nova.compute.manager [-] [instance: 455965de-816d-4ab2-9d5e-a12b06893e6f] Took 1.61 seconds to deallocate network for instance. 
[ 1112.704707] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1112.705294] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d4205085-b60c-424d-8feb-3f2c141a650b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.717214] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1112.717214] env[62914]: value = "task-4832670" [ 1112.717214] env[62914]: _type = "Task" [ 1112.717214] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.733198] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832670, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.845636] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5259711e-480f-a1d5-e562-5ac6e9a0b854, 'name': SearchDatastore_Task, 'duration_secs': 0.032582} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.846153] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.846627] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 58be0d35-9392-47ad-b87c-a1b66cdc3623/58be0d35-9392-47ad-b87c-a1b66cdc3623.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1112.847099] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc01812f-9fd7-4501-80fa-e99207aa0a7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.857100] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1112.857100] env[62914]: value = "task-4832671" [ 1112.857100] env[62914]: _type = "Task" [ 1112.857100] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.869095] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832671, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.940558] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832667, 'name': Rename_Task, 'duration_secs': 0.624118} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.940928] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1112.941219] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-578d46b2-d5ea-4fa1-acc3-7b3fca426949 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.949511] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1112.949511] env[62914]: value = "task-4832672" [ 1112.949511] env[62914]: _type = "Task" [ 1112.949511] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.963487] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.015400] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832669, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.095182] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] There are 54 instances to clean {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11313}} [ 1113.095756] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 4648e825-359d-497f-99b4-cbc51b135860] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1113.206850] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.229614] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832670, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.370428] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832671, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.462215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f788c79-7557-426f-b885-3b1a790d9b3b tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.463150] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832672, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.463501] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.257s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.463790] env[62914]: DEBUG nova.objects.instance [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lazy-loading 'resources' on Instance uuid 455965de-816d-4ab2-9d5e-a12b06893e6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1113.516891] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832669, 'name': ReconfigVM_Task, 'duration_secs': 0.901256} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.517423] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 422f30c1-fc6a-4c82-9003-806a5959ee8d/422f30c1-fc6a-4c82-9003-806a5959ee8d.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.518161] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-710ce231-68f7-450e-a79a-81d7a368537a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.528958] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1113.528958] env[62914]: value = "task-4832673" [ 1113.528958] env[62914]: _type = "Task" [ 1113.528958] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.539027] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832673, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.602467] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: c56b9ad3-8c89-44ee-8ee9-8e256bcad573] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1113.729399] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832670, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.873403] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832671, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664381} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.873403] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 58be0d35-9392-47ad-b87c-a1b66cdc3623/58be0d35-9392-47ad-b87c-a1b66cdc3623.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1113.873403] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.873403] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8fa0b51-cf2d-4a3b-8c52-1306ecc18786 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.887027] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1113.887027] env[62914]: value = "task-4832674" [ 1113.887027] env[62914]: _type = "Task" [ 1113.887027] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.896252] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832674, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.960377] env[62914]: DEBUG oslo_vmware.api [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832672, 'name': PowerOnVM_Task, 'duration_secs': 0.93943} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.960598] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1113.960813] env[62914]: INFO nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Took 10.68 seconds to spawn the instance on the hypervisor. 
[ 1113.961031] env[62914]: DEBUG nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1113.961890] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d8c96a-61c3-4ff2-ab7a-35cb57d72b14 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.042948] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832673, 'name': Rename_Task, 'duration_secs': 0.274666} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.043169] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1114.043351] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9eed628b-eefc-4bc0-a25a-300acdcdbfa1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.051210] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1114.051210] env[62914]: value = "task-4832675" [ 1114.051210] env[62914]: _type = "Task" [ 1114.051210] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.065014] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.106389] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 6a8d57e1-24d4-49fc-b7ef-ee2fd8224fce] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1114.232611] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832670, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.276007] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "4af05599-f754-4f81-bcbd-019d7ee58fc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.276313] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "4af05599-f754-4f81-bcbd-019d7ee58fc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.286039] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9022bf69-392a-4dac-9c1e-6c1fa34e0170 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.295685] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda1a3d9-48f7-49fc-9c35-455135a617e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.328182] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6637d57-6afe-4134-907d-bc848f12314e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.336976] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe607df-6c79-4762-9e43-c1282d645df0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.352970] env[62914]: DEBUG nova.compute.provider_tree [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.396458] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832674, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07417} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.396787] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1114.397717] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c0f587-8db8-43d7-973c-fc01e51e4e49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.423407] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 58be0d35-9392-47ad-b87c-a1b66cdc3623/58be0d35-9392-47ad-b87c-a1b66cdc3623.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.423519] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a3773d9-188b-4816-861d-0f3e75831736 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.444421] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1114.444421] env[62914]: value = "task-4832676" [ 1114.444421] env[62914]: _type = "Task" [ 1114.444421] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.453835] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832676, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.485372] env[62914]: INFO nova.compute.manager [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Took 19.08 seconds to build instance. [ 1114.562188] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832675, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.609290] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 18329e67-719b-4609-83de-7db2c4096781] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1114.731930] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832670, 'name': CloneVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.778822] env[62914]: DEBUG nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1114.856842] env[62914]: DEBUG nova.scheduler.client.report [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1114.955409] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832676, 'name': ReconfigVM_Task, 'duration_secs': 0.492141} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.955722] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 58be0d35-9392-47ad-b87c-a1b66cdc3623/58be0d35-9392-47ad-b87c-a1b66cdc3623.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.956385] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-879c36cc-26ff-4e79-b3ed-6fa68411a454 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.963364] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1114.963364] env[62914]: value = "task-4832677" [ 1114.963364] env[62914]: _type = "Task" [ 1114.963364] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.974869] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832677, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.989128] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d382d78a-0dea-4216-bdf3-1f0a7b5b1409 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.609s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.062024] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832675, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.112895] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: da2af7d4-f311-444a-aa9f-0744e698defb] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1115.231609] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832670, 'name': CloneVM_Task, 'duration_secs': 2.023291} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.232064] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Created linked-clone VM from snapshot [ 1115.232774] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b23613-9251-41ca-84f6-421f4bb31347 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.242564] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Uploading image 598263a1-d518-4327-a7bf-74fedbc43cae {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1115.274868] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1115.274868] env[62914]: value = "vm-942078" [ 1115.274868] env[62914]: _type = "VirtualMachine" [ 1115.274868] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1115.275500] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-75039c61-c381-497d-9008-b8c7058d3230 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.285106] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease: (returnval){ [ 1115.285106] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219699e-ed8c-0a8f-b690-a02f5146e300" [ 1115.285106] env[62914]: _type = "HttpNfcLease" [ 1115.285106] env[62914]: } obtained for exporting VM: (result){ [ 1115.285106] env[62914]: value = "vm-942078" [ 1115.285106] env[62914]: _type = "VirtualMachine" [ 1115.285106] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1115.285106] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the lease: (returnval){ [ 1115.285106] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219699e-ed8c-0a8f-b690-a02f5146e300" [ 1115.285106] env[62914]: _type = "HttpNfcLease" [ 1115.285106] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1115.293960] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1115.293960] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219699e-ed8c-0a8f-b690-a02f5146e300" [ 1115.293960] env[62914]: _type = "HttpNfcLease" [ 1115.293960] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1115.317322] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.364356] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.367104] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.050s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.368873] env[62914]: INFO nova.compute.claims [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.403572] env[62914]: INFO nova.scheduler.client.report [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Deleted allocations for instance 455965de-816d-4ab2-9d5e-a12b06893e6f [ 1115.474382] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832677, 'name': Rename_Task, 'duration_secs': 0.241784} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.474774] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1115.475039] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5eb5d1d8-1425-4c0a-bb08-21769afc98f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.482216] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1115.482216] env[62914]: value = "task-4832679" [ 1115.482216] env[62914]: _type = "Task" [ 1115.482216] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.492662] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832679, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.566458] env[62914]: DEBUG oslo_vmware.api [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832675, 'name': PowerOnVM_Task, 'duration_secs': 1.181645} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.566860] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1115.567205] env[62914]: INFO nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Took 9.80 seconds to spawn the instance on the hypervisor. [ 1115.567507] env[62914]: DEBUG nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1115.568683] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2148c12c-26ca-4c2d-be04-6bbb268ae842 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.616571] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 68a77363-c25b-426e-86e2-fa31fc6f0ee1] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1115.802374] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1115.802374] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219699e-ed8c-0a8f-b690-a02f5146e300" [ 1115.802374] env[62914]: _type = "HttpNfcLease" [ 1115.802374] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1115.803028] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1115.803028] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5219699e-ed8c-0a8f-b690-a02f5146e300" [ 1115.803028] env[62914]: _type = "HttpNfcLease" [ 1115.803028] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1115.804094] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ac5d8a-af8f-4cb3-9551-933f8eb861fd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.815157] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c7e75-a9cc-4189-76b8-befdf0bb0739/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1115.815420] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c7e75-a9cc-4189-76b8-befdf0bb0739/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1115.918637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7961c763-155d-4825-8fef-7463909f4bdd tempest-ServersNegativeTestJSON-170677023 tempest-ServersNegativeTestJSON-170677023-project-member] Lock "455965de-816d-4ab2-9d5e-a12b06893e6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.498s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.924065] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-25b1b97b-102d-49e2-bbb6-ea3d8727b8b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.994542] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832679, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.092380] env[62914]: INFO nova.compute.manager [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Took 17.55 seconds to build instance. 
[ 1116.119671] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 417d4287-0f76-4d2e-b1da-43455d7ff3e6] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1116.210692] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.211076] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.211358] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.211587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.211824] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.214193] env[62914]: INFO nova.compute.manager [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Terminating instance [ 1116.216152] env[62914]: DEBUG nova.compute.manager [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1116.216381] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1116.217980] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a56d9e-5bd3-4e5d-aea1-c71ba6dfe97b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.227274] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1116.227615] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7e6f444-06be-4771-985e-42b6d153ee91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.235434] env[62914]: DEBUG oslo_vmware.api [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1116.235434] env[62914]: value = "task-4832680" [ 1116.235434] env[62914]: _type = "Task" [ 1116.235434] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.246195] env[62914]: DEBUG oslo_vmware.api [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832680, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.495912] env[62914]: DEBUG oslo_vmware.api [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832679, 'name': PowerOnVM_Task, 'duration_secs': 0.895346} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.496884] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1116.500670] env[62914]: INFO nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Took 8.19 seconds to spawn the instance on the hypervisor. 
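Editor's aside: the lockutils entries above (acquiring the instance-UUID lock for do_terminate_instance and the "<uuid>-events" lock for _clear_events, then releasing them with the held time) follow oslo.concurrency's per-name locking. A minimal sketch of that pattern, assuming only the public lockutils API; the lock names and function bodies are placeholders, and Nova itself reaches this code through its own synchronized helpers rather than calling lockutils directly like this.

    # Hedged sketch of per-name locking as seen in the "Acquiring/acquired/released" lines.
    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid):
        # One lock per instance UUID: concurrent lifecycle operations on the
        # same instance are serialized, while different instances proceed in
        # parallel.
        with lockutils.lock(instance_uuid):
            print('terminating %s' % instance_uuid)

    # Decorator form, as used for shared resources such as the resource
    # tracker's "compute_resources" critical section seen later in the trace.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        print('resource tracker critical section')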
[ 1116.500670] env[62914]: DEBUG nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1116.503750] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512d43a7-d078-4f51-820c-beb415047717 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.593318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-52625c5f-9025-416a-99da-e9e7086b6ebc tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.065s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.624482] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 79c7728a-0452-44ec-91de-62e3f09f9183] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1116.719278] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80875184-0100-4be3-adac-50c91e771687 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.730178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a34f8f-4267-4b01-913d-a6b97b142a38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.767598] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ed79e6-43dc-46ba-8e4e-07f045c64bf1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.773809] env[62914]: DEBUG oslo_vmware.api [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832680, 'name': PowerOffVM_Task, 'duration_secs': 0.355853} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.774645] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1116.774925] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1116.775306] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a68106d8-9136-48ba-8a4e-701bb23593c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.788078] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e14bbd-55ea-4883-83f0-da2b51e1bfa9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.804566] env[62914]: DEBUG nova.compute.provider_tree [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.866555] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1116.866930] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1116.867198] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore1] 03960f2e-3263-42f7-a7a4-7d7bcd23cf65 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1116.867503] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f97ce6a2-3740-4f7f-9c08-121403408d5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.877394] env[62914]: DEBUG oslo_vmware.api [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1116.877394] env[62914]: value = "task-4832682" [ 1116.877394] env[62914]: _type = "Task" [ 1116.877394] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.892183] env[62914]: DEBUG oslo_vmware.api [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.027021] env[62914]: INFO nova.compute.manager [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Took 15.48 seconds to build instance. [ 1117.126208] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: dcf5a6d6-13ef-4d4f-8d9a-23c268cf9fb1] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1117.309358] env[62914]: DEBUG nova.scheduler.client.report [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1117.388533] env[62914]: DEBUG oslo_vmware.api [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347316} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.388981] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.389280] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1117.389499] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1117.389863] env[62914]: INFO nova.compute.manager [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Took 1.17 seconds to destroy the instance on the hypervisor. 
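Editor's aside: the destroy path traced above runs PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task before network deallocation. The sketch below is an illustrative outline of that sequence only; the session, vm_ref, datacenter reference and datastore path are placeholders, and the production logic lives in nova.virt.vmwareapi (vmops, vm_util, ds_util) with considerably more error handling.

    # Hedged sketch of the power-off / unregister / delete-files sequence.
    def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
        # 1. Power the VM off; vCenter returns a task that must be awaited.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # 2. Unregister the VM from the inventory (a synchronous call, which
        #    is why no task id appears for UnregisterVM in the trace).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # 3. Delete the instance directory from the datastore, e.g. the
        #    "[datastore1] <instance uuid>" path logged by ds_util.file_delete.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)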
[ 1117.390214] env[62914]: DEBUG oslo.service.loopingcall [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.390440] env[62914]: DEBUG nova.compute.manager [-] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1117.390658] env[62914]: DEBUG nova.network.neutron [-] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1117.464627] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "422f30c1-fc6a-4c82-9003-806a5959ee8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.466626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.466879] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "422f30c1-fc6a-4c82-9003-806a5959ee8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.467090] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.467271] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.469623] env[62914]: INFO nova.compute.manager [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Terminating instance [ 1117.471619] 
env[62914]: DEBUG nova.compute.manager [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1117.471824] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1117.472708] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c73c717-332e-4bc8-b9c7-c028e38e8b69 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.482677] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1117.483652] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-654387ef-30c5-4575-aa67-e603a8767b2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.491759] env[62914]: DEBUG oslo_vmware.api [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1117.491759] env[62914]: value = "task-4832683" [ 1117.491759] env[62914]: _type = "Task" [ 1117.491759] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.503014] env[62914]: DEBUG oslo_vmware.api [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832683, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.529944] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fd6aa1a3-2257-472e-bec9-6a741e765eac tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.985s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.631315] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 120fa16e-60cd-4326-b6c4-f1df419dbcb7] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1117.814911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.816176] env[62914]: DEBUG nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1118.007549] env[62914]: DEBUG oslo_vmware.api [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832683, 'name': PowerOffVM_Task, 'duration_secs': 0.409257} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.007924] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1118.008561] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1118.008887] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e614e3d-0f90-4fc2-9bd0-ab26dd74d61b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.084218] env[62914]: DEBUG nova.compute.manager [req-93351d21-f0b7-4612-8d8f-838eaab0a67f req-68233a37-e01d-4ee3-b2dd-98f21cb29a73 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Received event network-vif-deleted-7cfd599e-0580-46fa-95e7-a1412897fede {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1118.084435] env[62914]: INFO nova.compute.manager [req-93351d21-f0b7-4612-8d8f-838eaab0a67f req-68233a37-e01d-4ee3-b2dd-98f21cb29a73 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Neutron deleted interface 7cfd599e-0580-46fa-95e7-a1412897fede; detaching it from the instance and deleting it from the info cache [ 1118.084610] env[62914]: DEBUG nova.network.neutron [req-93351d21-f0b7-4612-8d8f-838eaab0a67f req-68233a37-e01d-4ee3-b2dd-98f21cb29a73 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.091242] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1118.091569] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1118.091858] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Deleting the datastore file [datastore1] 422f30c1-fc6a-4c82-9003-806a5959ee8d {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1118.092255] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4278d83-8003-4217-8587-30c0d7038c20 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.101487] env[62914]: DEBUG oslo_vmware.api [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for the task: (returnval){ [ 1118.101487] env[62914]: value = "task-4832685" [ 1118.101487] env[62914]: _type = "Task" [ 1118.101487] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.116013] env[62914]: DEBUG oslo_vmware.api [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832685, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.138635] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: b1a9b6c6-d249-4cfd-a37c-a657eabc3c6d] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1118.322269] env[62914]: DEBUG nova.compute.utils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1118.324047] env[62914]: DEBUG nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Not allocating networking since 'none' was specified. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1118.415242] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e6baed-3f03-4c18-a562-6622c5e32e99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.424934] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Suspending the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1118.425507] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e9e23114-db7e-41f2-911a-c60d32c20e04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.432803] env[62914]: DEBUG oslo_vmware.api [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1118.432803] env[62914]: value = "task-4832686" [ 1118.432803] env[62914]: _type = "Task" [ 1118.432803] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.441726] env[62914]: DEBUG oslo_vmware.api [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832686, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.550981] env[62914]: DEBUG nova.network.neutron [-] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.587552] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb044b84-0d89-48a5-b28f-17ebd3b7f5e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.598777] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe6a202-d23f-4b84-b2be-b9d79ab719f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.623842] env[62914]: DEBUG oslo_vmware.api [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Task: {'id': task-4832685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328843} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.624207] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.624526] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1118.624643] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1118.624807] env[62914]: INFO nova.compute.manager [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1118.625092] env[62914]: DEBUG oslo.service.loopingcall [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.625355] env[62914]: DEBUG nova.compute.manager [-] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1118.625459] env[62914]: DEBUG nova.network.neutron [-] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1118.646261] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: fa33e1a5-677a-489c-8c89-a33066b18103] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1118.648440] env[62914]: DEBUG nova.compute.manager [req-93351d21-f0b7-4612-8d8f-838eaab0a67f req-68233a37-e01d-4ee3-b2dd-98f21cb29a73 service nova] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Detach interface failed, port_id=7cfd599e-0580-46fa-95e7-a1412897fede, reason: Instance 03960f2e-3263-42f7-a7a4-7d7bcd23cf65 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1118.825584] env[62914]: DEBUG nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1118.945239] env[62914]: DEBUG oslo_vmware.api [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832686, 'name': SuspendVM_Task} progress is 58%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.055079] env[62914]: INFO nova.compute.manager [-] [instance: 03960f2e-3263-42f7-a7a4-7d7bcd23cf65] Took 1.66 seconds to deallocate network for instance. [ 1119.150588] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: b443050b-78ae-4f9d-81d4-508f5cf4a322] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1119.447464] env[62914]: DEBUG oslo_vmware.api [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832686, 'name': SuspendVM_Task, 'duration_secs': 0.744379} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.447901] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Suspended the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1119.448186] env[62914]: DEBUG nova.compute.manager [None req-004de518-cfcb-43bf-a373-85a2183ae22e tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1119.449080] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b629398-a4da-47cd-bc38-3d8dd64ccdb0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.463425] env[62914]: DEBUG nova.network.neutron [-] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.564666] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.565034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.566382] env[62914]: DEBUG nova.objects.instance [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid 03960f2e-3263-42f7-a7a4-7d7bcd23cf65 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.653404] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: dac99ed2-aed9-4c3e-bcab-a8de9967990c] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1119.835131] env[62914]: DEBUG nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1119.867806] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1119.868109] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1119.868284] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1119.868567] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1119.868690] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1119.868867] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1119.869127] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1119.869318] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1119.869503] env[62914]: DEBUG nova.virt.hardware [None 
req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1119.869681] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1119.869922] env[62914]: DEBUG nova.virt.hardware [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1119.870939] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14f4613-6d59-49ca-82fb-def4f1b79dcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.879900] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd56722d-0926-4b8d-87f0-381ed61d96d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.895374] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Instance VIF info [] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1119.901231] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Creating folder: Project (b771ccc3f109485793c09566f824006f). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1119.901629] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfd1166f-4fc4-4ab9-8813-45bb6b926520 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.913136] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Created folder: Project (b771ccc3f109485793c09566f824006f) in parent group-v941773. [ 1119.913404] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Creating folder: Instances. Parent ref: group-v942079. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1119.913688] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84343355-bc4f-48d6-9d80-4de81a65eb5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.923593] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Created folder: Instances in parent group-v942079. [ 1119.923906] env[62914]: DEBUG oslo.service.loopingcall [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1119.924211] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1119.924459] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74feb764-c15c-4241-9b40-c8a2083f6227 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.942568] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1119.942568] env[62914]: value = "task-4832689" [ 1119.942568] env[62914]: _type = "Task" [ 1119.942568] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.950954] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832689, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.965659] env[62914]: INFO nova.compute.manager [-] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Took 1.34 seconds to deallocate network for instance. 
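Editor's aside: the "Waiting for function ... to return." DEBUG lines above (for vm_util.create_vm and for _deallocate_network_with_retries) are emitted by oslo.service's looping-call machinery. As a hedged illustration of one variant of that pattern, the sketch below uses FixedIntervalLoopingCall with LoopingCallDone; check_done and the interval are placeholders, and the exact wrapper Nova uses at that log site may be a different helper from the same module.

    # Hedged sketch of the oslo.service looping-call wait/retry pattern.
    from oslo_service import loopingcall

    def wait_until(check_done, interval=1.0):
        """Call check_done() every `interval` seconds until it yields a value."""
        def _poll():
            result = check_done()
            if result is not None:
                # Stop the loop and hand the value back to start().wait().
                raise loopingcall.LoopingCallDone(retvalue=result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()

    # Placeholder usage: blocks briefly, then returns 'done'.
    # value = wait_until(lambda: 'done')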
[ 1120.129277] env[62914]: DEBUG nova.compute.manager [req-1d17844f-96f6-43a4-afa8-10ea9f20c555 req-fd51dc8a-a861-48e1-b9d0-a2c7fe677784 service nova] [instance: 422f30c1-fc6a-4c82-9003-806a5959ee8d] Received event network-vif-deleted-9493c2a8-67b9-476a-b101-494b5cab84b3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1120.156994] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: d9476d24-fbc5-4e30-bf67-85c388e943fd] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1120.371803] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82252339-6233-4219-8d2a-34be1ccef58c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.387024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea885a47-3709-4835-8f1b-d81d75512de1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.425557] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60c7887-2030-43ae-b759-ac91cd85e369 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.436950] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19571c88-5df2-4bc2-8303-79ddc930330e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.460018] env[62914]: DEBUG nova.compute.provider_tree [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.466824] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832689, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.472617] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.663696] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: af541b15-19ce-415a-b03e-cb605b780247] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1120.954560] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832689, 'name': CreateVM_Task, 'duration_secs': 0.6601} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.954760] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1120.955263] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.955440] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.955790] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1120.956077] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1294565c-3722-464d-809f-8f7c96bf0a57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.962378] env[62914]: DEBUG nova.scheduler.client.report [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1120.965776] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1120.965776] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524cea79-0cf2-8f96-f7db-01cd3b2e4f51" [ 1120.965776] env[62914]: _type = "Task" [ 1120.965776] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.977693] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524cea79-0cf2-8f96-f7db-01cd3b2e4f51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.997727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "58be0d35-9392-47ad-b87c-a1b66cdc3623" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.998043] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.998626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "58be0d35-9392-47ad-b87c-a1b66cdc3623-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.998626] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.998751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.001869] env[62914]: INFO nova.compute.manager [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Terminating instance [ 1121.004071] env[62914]: DEBUG nova.compute.manager [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1121.004299] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1121.005269] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfb7d16-2f15-4c70-808c-c8200a31f68f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.014518] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1121.014806] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-654fb723-6743-4d61-bf43-a0b2ef003878 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.084730] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1121.085138] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1121.085439] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleting the datastore file [datastore2] 58be0d35-9392-47ad-b87c-a1b66cdc3623 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.085916] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ac4fe8c-07ee-4a99-878b-4e65156566af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.094253] env[62914]: DEBUG oslo_vmware.api [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1121.094253] env[62914]: value = "task-4832691" [ 1121.094253] env[62914]: _type = "Task" [ 1121.094253] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.104080] env[62914]: DEBUG oslo_vmware.api [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832691, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.165678] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3e6a3787-3e9c-411c-9c3c-305a62061b47] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1121.467318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.472790] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.475399] env[62914]: DEBUG nova.objects.instance [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lazy-loading 'resources' on Instance uuid 422f30c1-fc6a-4c82-9003-806a5959ee8d {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.481553] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524cea79-0cf2-8f96-f7db-01cd3b2e4f51, 'name': SearchDatastore_Task, 'duration_secs': 0.02073} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.482531] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.482793] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1121.483075] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.483247] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.483841] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1121.484057] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6260d9f-4421-4455-981c-c0860b9d0801 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.499130] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1121.499375] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1121.500554] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b573745a-ea75-45bf-a690-106cb59ab0c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.508271] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1121.508271] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fc77c5-4f1c-4c68-5589-e87d4c824c9a" [ 1121.508271] env[62914]: _type = "Task" [ 1121.508271] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.518061] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fc77c5-4f1c-4c68-5589-e87d4c824c9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.594879] env[62914]: INFO nova.scheduler.client.report [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance 03960f2e-3263-42f7-a7a4-7d7bcd23cf65 [ 1121.610529] env[62914]: DEBUG oslo_vmware.api [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256354} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.610824] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1121.611091] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1121.611454] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1121.611660] env[62914]: INFO nova.compute.manager [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Took 0.61 seconds to destroy the instance on the hypervisor. 
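The entries above trace the VMware-driver teardown for instance 58be0d35-9392-47ad-b87c-a1b66cdc3623: unregister the VM, delete its datastore directory through a vCenter task, then report the total destroy time. Below is a minimal sketch of that sequence using oslo.vmware primitives; it assumes an established VMwareAPISession (`session`), a VM managed-object reference (`vm_ref`), a datacenter reference (`dc_ref`) and the instance's datastore path (`ds_path`) are already in hand, and it omits the error handling, notifications and block-device cleanup that nova.virt.vmwareapi.vmops actually performs, so treat it as illustrative rather than Nova's implementation.

```python
# Illustrative only: the unregister-then-delete sequence recorded above,
# written against oslo.vmware. `session`, `vm_ref`, `dc_ref` and `ds_path`
# are assumed to exist already.

def destroy_on_hypervisor(session, vm_ref, dc_ref, ds_path):
    # UnregisterVM drops the VM from the vCenter inventory but leaves its files
    # on the datastore ("Unregistering the VM" / "Unregistered the VM").
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # DeleteDatastoreFile_Task then removes the instance directory; the returned
    # task is polled until it finishes ("Deleting the datastore file [datastore2] ..."
    # followed by "DeleteDatastoreFile_Task ... completed successfully").
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=str(ds_path), datacenter=dc_ref)
    session.wait_for_task(task)
```

The recurring "Task: {...} progress is N%" and "completed successfully" lines around these entries are emitted by the polling that wait_for_task drives internally (_poll_task in oslo_vmware/api.py).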
[ 1121.611956] env[62914]: DEBUG oslo.service.loopingcall [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1121.612206] env[62914]: DEBUG nova.compute.manager [-] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1121.612300] env[62914]: DEBUG nova.network.neutron [-] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1121.669111] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 13f2a615-aa95-411d-92f8-9ff1b6eba420] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1122.026475] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52fc77c5-4f1c-4c68-5589-e87d4c824c9a, 'name': SearchDatastore_Task, 'duration_secs': 0.016329} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.030638] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdac8eda-b83b-4b0b-9874-ed97959e675d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.038333] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1122.038333] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52174801-70d5-397a-16b6-3b7a2ddaab50" [ 1122.038333] env[62914]: _type = "Task" [ 1122.038333] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.050029] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52174801-70d5-397a-16b6-3b7a2ddaab50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.110358] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1334e005-7d0b-4c15-81e0-1584f2e57408 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "03960f2e-3263-42f7-a7a4-7d7bcd23cf65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.899s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.174021] env[62914]: DEBUG nova.compute.manager [req-ee9f9161-5384-468b-a1a6-63e351b137b1 req-b1aeaad6-e247-4916-ae61-a07a68e7a46c service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Received event network-vif-deleted-629efcb6-6e27-4b06-99d0-469216825a32 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1122.174021] env[62914]: INFO nova.compute.manager [req-ee9f9161-5384-468b-a1a6-63e351b137b1 req-b1aeaad6-e247-4916-ae61-a07a68e7a46c service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Neutron deleted interface 629efcb6-6e27-4b06-99d0-469216825a32; detaching it from the instance and deleting it from the info cache [ 1122.174021] env[62914]: DEBUG nova.network.neutron [req-ee9f9161-5384-468b-a1a6-63e351b137b1 req-b1aeaad6-e247-4916-ae61-a07a68e7a46c service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.179115] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 47aa2783-367e-4445-8261-7c75eb7561ab] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1122.288380] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708ea077-b354-4897-ae1c-4fea42aaf6aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.297435] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084f8aa1-5d15-4186-8c28-c76ede661860 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.331172] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4166578d-edb9-4ef7-a8a1-916de271890e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.340060] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0396fc4c-f66b-4693-9445-fd96a842637b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.357968] env[62914]: DEBUG nova.compute.provider_tree [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.554742] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] 
Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52174801-70d5-397a-16b6-3b7a2ddaab50, 'name': SearchDatastore_Task, 'duration_secs': 0.029984} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.555047] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.555083] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 4af05599-f754-4f81-bcbd-019d7ee58fc5/4af05599-f754-4f81-bcbd-019d7ee58fc5.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1122.555360] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f95750ad-4843-4005-b870-abdd52dae249 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.564229] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1122.564229] env[62914]: value = "task-4832692" [ 1122.564229] env[62914]: _type = "Task" [ 1122.564229] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.576737] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832692, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.614425] env[62914]: DEBUG nova.network.neutron [-] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.676829] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e392a04-0000-4171-b859-7f94827f5940 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.680020] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: ec73b924-e132-44b6-bc67-2b3c08592f03] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1122.688854] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a1ba01-0a61-48eb-bf27-0bfba7b13021 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.753452] env[62914]: DEBUG nova.compute.manager [req-ee9f9161-5384-468b-a1a6-63e351b137b1 req-b1aeaad6-e247-4916-ae61-a07a68e7a46c service nova] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Detach interface failed, port_id=629efcb6-6e27-4b06-99d0-469216825a32, reason: Instance 58be0d35-9392-47ad-b87c-a1b66cdc3623 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1122.862065] env[62914]: DEBUG nova.scheduler.client.report [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1123.076744] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832692, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.117371] env[62914]: INFO nova.compute.manager [-] [instance: 58be0d35-9392-47ad-b87c-a1b66cdc3623] Took 1.50 seconds to deallocate network for instance. 
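The inventory payload reported a few entries above for provider f2f7a014-852b-4b37-9610-c5761f4b0175 is what the "Inventory has not changed" checks compare against, and it determines how much the scheduler may place on this node. The short worked example below uses the logged numbers directly and assumes placement's usual capacity formula, (total - reserved) * allocation_ratio, with max_unit capping any single allocation; it is a back-of-the-envelope illustration, not code from Nova or placement.

```python
# Worked example from the inventory logged above. Assumption: capacity is
# computed as (total - reserved) * allocation_ratio, and max_unit bounds
# what one allocation (one instance) may request.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-instance max_unit={inv['max_unit']}")

# VCPU: schedulable=192, per-instance max_unit=16
# MEMORY_MB: schedulable=196078, per-instance max_unit=65530
# DISK_GB: schedulable=200, per-instance max_unit=95
```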
[ 1123.186326] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: aedc785f-619f-4b9f-850f-790f84e57577] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1123.247953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.248265] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.368411] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.389676] env[62914]: INFO nova.scheduler.client.report [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Deleted allocations for instance 422f30c1-fc6a-4c82-9003-806a5959ee8d [ 1123.577878] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832692, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.628302] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.628598] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.628849] env[62914]: DEBUG nova.objects.instance [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'resources' on Instance uuid 58be0d35-9392-47ad-b87c-a1b66cdc3623 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.666872] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "4d22f1d3-b37a-4356-a41c-516a19f78538" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.666872] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.688282] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 769c3873-7480-47de-894b-40dbf3f2f7f0] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1123.751651] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1123.901474] env[62914]: DEBUG oslo_concurrency.lockutils [None req-70ba0449-77db-4184-ad68-3957d4d3c871 tempest-ServerMetadataNegativeTestJSON-311831323 tempest-ServerMetadataNegativeTestJSON-311831323-project-member] Lock "422f30c1-fc6a-4c82-9003-806a5959ee8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.435s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.079721] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832692, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.169482] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1124.194379] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: bdec185e-2af7-4379-8c67-03e125750bb4] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1124.275501] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.382313] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfb366d-ceed-4167-91dc-45c70b09937a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.391753] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2658e0e-99eb-4661-8662-e60741241fb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.425664] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6053fa12-784a-4e30-a1db-5c3b3420f59a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.434475] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7584bb-33e5-4d28-8e93-82dad562e376 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.452138] env[62914]: DEBUG nova.compute.provider_tree [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.460173] env[62914]: DEBUG oslo_concurrency.lockutils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef 
tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.461032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.579867] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832692, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.689364] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.698097] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 55192659-4d65-4e74-a47f-46d650b6b095] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1124.955396] env[62914]: DEBUG nova.scheduler.client.report [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1124.964179] env[62914]: DEBUG nova.compute.utils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1125.078771] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832692, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.430935} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.078966] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 4af05599-f754-4f81-bcbd-019d7ee58fc5/4af05599-f754-4f81-bcbd-019d7ee58fc5.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1125.079164] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.079529] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05c8abf1-910a-4712-ba06-f4c97b6a7ccc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.088082] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1125.088082] env[62914]: value = "task-4832693" [ 1125.088082] env[62914]: _type = "Task" [ 1125.088082] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.096645] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832693, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.202060] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 43227b1e-c90a-47d0-a4f5-fd0af0826e94] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1125.461638] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.464295] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.188s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.465302] env[62914]: INFO nova.compute.claims [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1125.470777] env[62914]: DEBUG oslo_concurrency.lockutils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.502254] env[62914]: INFO nova.scheduler.client.report [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocations for instance 58be0d35-9392-47ad-b87c-a1b66cdc3623 [ 1125.565253] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c7e75-a9cc-4189-76b8-befdf0bb0739/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1125.566178] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985f6149-f6f4-43fb-8867-01a944fcad30 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.573651] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c7e75-a9cc-4189-76b8-befdf0bb0739/disk-0.vmdk is in state: ready. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1125.573832] env[62914]: ERROR oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c7e75-a9cc-4189-76b8-befdf0bb0739/disk-0.vmdk due to incomplete transfer. [ 1125.574149] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-869bf6f7-6a4a-4cd4-a1aa-7b8a670626d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.582784] env[62914]: DEBUG oslo_vmware.rw_handles [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c7e75-a9cc-4189-76b8-befdf0bb0739/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1125.583068] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Uploaded image 598263a1-d518-4327-a7bf-74fedbc43cae to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1125.586142] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1125.586142] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c7c733b3-47c5-47fd-9adf-958482215570 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.596471] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125013} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.598488] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.599232] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1125.599232] env[62914]: value = "task-4832694" [ 1125.599232] env[62914]: _type = "Task" [ 1125.599232] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.601166] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce91869-2c24-45a0-b5a7-be7ffffe1a01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.613580] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832694, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.629713] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 4af05599-f754-4f81-bcbd-019d7ee58fc5/4af05599-f754-4f81-bcbd-019d7ee58fc5.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.630088] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4749b609-5a71-4ae5-9eeb-3b70010bb648 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.651421] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1125.651421] env[62914]: value = "task-4832695" [ 1125.651421] env[62914]: _type = "Task" [ 1125.651421] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.661062] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832695, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.706421] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 83de3d7c-2308-4678-ae90-a30705f6a8c4] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1126.012454] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2cec7b37-dc0e-4982-9110-913c8147ebc0 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "58be0d35-9392-47ad-b87c-a1b66cdc3623" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.014s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.115494] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832694, 'name': Destroy_Task, 'duration_secs': 0.442038} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.115659] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Destroyed the VM [ 1126.115839] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1126.116159] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a2144f84-cb36-4ef4-a01f-43e7ba886b41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.123814] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1126.123814] env[62914]: value = "task-4832696" [ 1126.123814] env[62914]: _type = "Task" [ 1126.123814] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.133127] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832696, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.162555] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832695, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.209618] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: cca4bbf9-8864-4805-b95e-954e6b570eae] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1126.552603] env[62914]: DEBUG oslo_concurrency.lockutils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.552603] env[62914]: DEBUG oslo_concurrency.lockutils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.552603] env[62914]: INFO nova.compute.manager [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Attaching volume 548d77fc-3693-4e6f-8097-f2402ca0f874 to /dev/sdb [ 1126.593108] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d39bb41-0b62-482b-994f-f69694ffbbc0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.602978] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac96916-47bc-4956-964e-e067de1265dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.618042] env[62914]: DEBUG nova.virt.block_device [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating existing volume attachment record: bcc61ec6-855b-44a9-a3cd-309f15e17fa7 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1126.634203] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832696, 'name': RemoveSnapshot_Task, 'duration_secs': 0.347953} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.637216] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1126.637290] env[62914]: DEBUG nova.compute.manager [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1126.638238] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f664ba9-3388-40e5-b99c-c81de6d0a6f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.662429] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832695, 'name': ReconfigVM_Task, 'duration_secs': 0.526712} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.662815] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 4af05599-f754-4f81-bcbd-019d7ee58fc5/4af05599-f754-4f81-bcbd-019d7ee58fc5.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.666218] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e1b2628-29f2-4893-8f31-bfda64b7d6cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.674192] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1126.674192] env[62914]: value = "task-4832697" [ 1126.674192] env[62914]: _type = "Task" [ 1126.674192] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.711212] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e3ae6f-388b-47f7-96e1-e27459208240 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.714546] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: d47ee5d0-9bd9-42de-88b8-6eb1a7ca076f] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1126.727027] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b9dd6f-024c-4cf1-bf53-290e9eb1fe0a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.760206] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93925cbc-cac3-4bf7-9c02-305723b01e95 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.773846] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45da7771-99f9-49af-9641-b2a4a4a8cd8a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.791700] env[62914]: DEBUG nova.compute.provider_tree [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.153197] env[62914]: INFO nova.compute.manager [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Shelve offloading [ 1127.154933] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1127.155230] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb86ef15-909a-4ee5-813a-d92ef6633daf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.164032] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1127.164032] env[62914]: value = "task-4832701" [ 1127.164032] env[62914]: _type = "Task" [ 1127.164032] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.172988] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832701, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.183382] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832697, 'name': Rename_Task, 'duration_secs': 0.23269} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.183733] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1127.184020] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f043f9be-9b84-4166-a838-607f05f75f59 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.190608] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1127.190608] env[62914]: value = "task-4832702" [ 1127.190608] env[62914]: _type = "Task" [ 1127.190608] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.198755] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832702, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.218515] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: ea06d3c3-d836-4e66-ac66-42f9886cd5de] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1127.269335] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.269585] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.269787] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.269999] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.270215] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.272027] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.272237] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.273460] env[62914]: INFO nova.compute.manager [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Terminating instance [ 1127.277860] env[62914]: DEBUG nova.compute.manager [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1127.278093] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1127.278949] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f27f5f-5298-4546-8e8d-7da100e39629 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.290396] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1127.290850] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae1e058c-ce1c-4670-82d4-dd622e7c0b96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.295139] env[62914]: DEBUG nova.scheduler.client.report [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1127.300140] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1127.300140] env[62914]: value = "task-4832703" [ 1127.300140] env[62914]: _type = "Task" [ 1127.300140] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.311199] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832703, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.676347] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1127.676608] env[62914]: DEBUG nova.compute.manager [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1127.677409] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b87bf53-7b2e-4080-87fa-ee757085b097 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.683978] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1127.684169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.684363] env[62914]: DEBUG nova.network.neutron [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1127.700256] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832702, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.721602] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 12e8b0ac-0dec-4928-ae65-ab53992ecab5] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1127.776909] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1127.802267] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.802905] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1127.806161] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.117s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.808418] env[62914]: INFO nova.compute.claims [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.821718] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832703, 'name': PowerOffVM_Task, 'duration_secs': 0.251967} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.822069] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1127.822260] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1127.822555] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea2b9a10-66d8-4dd5-91b3-a5cd8d7db021 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.923461] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1127.923712] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1127.923889] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleting the datastore file [datastore1] 67ecc3a1-03b0-4881-b5c4-9c4fa244b292 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1127.924161] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c67b1896-9be5-425b-b479-b97c8b63c215 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.930577] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1127.930577] env[62914]: value = "task-4832705" [ 1127.930577] env[62914]: _type = "Task" [ 1127.930577] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.938281] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832705, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.200396] env[62914]: DEBUG oslo_vmware.api [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832702, 'name': PowerOnVM_Task, 'duration_secs': 0.664907} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.200743] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1128.201026] env[62914]: INFO nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1128.201255] env[62914]: DEBUG nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1128.202179] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8c5c6d-42a7-47c4-b640-b093e7d37457 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.225601] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: c488ba7b-68cc-4876-934f-a11d33fca6ab] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1128.297973] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.318548] env[62914]: DEBUG nova.compute.utils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1128.320656] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1128.320838] env[62914]: DEBUG nova.network.neutron [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1128.388673] env[62914]: DEBUG nova.policy [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a40a14d9e19a4e2894245814173656eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1780142384594b1dabc6811b54144d56', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1128.442420] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832705, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.503365] env[62914]: DEBUG nova.network.neutron [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.704639] env[62914]: DEBUG nova.network.neutron [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Successfully created port: 5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1128.723866] env[62914]: INFO nova.compute.manager [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Took 13.43 seconds to build instance. [ 1128.730501] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 982936be-3cb1-4930-b135-8fc2019c5216] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1128.825182] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1128.944814] env[62914]: DEBUG oslo_vmware.api [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.724955} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.947729] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1128.948069] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1128.948199] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1128.948431] env[62914]: INFO nova.compute.manager [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1128.948731] env[62914]: DEBUG oslo.service.loopingcall [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1128.949176] env[62914]: DEBUG nova.compute.manager [-] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1128.949274] env[62914]: DEBUG nova.network.neutron [-] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1129.005573] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.104340] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50cb746-7cd9-459d-956c-c4e3cb789fa5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.114686] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18aa86c-d8f6-449a-acc4-290680dd59e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.155322] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb0ae89-aa18-4276-bc44-27fc513bdafc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.165995] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f42fa3-2a5a-4b56-97f8-895382ac0a59 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.182669] env[62914]: DEBUG nova.compute.provider_tree [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.224836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bec69c18-77a8-4be6-b576-48633631c193 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "4af05599-f754-4f81-bcbd-019d7ee58fc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.948s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.233815] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 32e8f18e-2116-43bd-9951-ad809ab95ba2] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1129.412652] env[62914]: DEBUG nova.compute.manager [req-3dde6c94-245a-459c-8e1d-354c9ee41765 req-911ae941-9671-48bd-ab6c-1e1808b954f1 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-vif-unplugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1129.412897] env[62914]: DEBUG 
oslo_concurrency.lockutils [req-3dde6c94-245a-459c-8e1d-354c9ee41765 req-911ae941-9671-48bd-ab6c-1e1808b954f1 service nova] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.413159] env[62914]: DEBUG oslo_concurrency.lockutils [req-3dde6c94-245a-459c-8e1d-354c9ee41765 req-911ae941-9671-48bd-ab6c-1e1808b954f1 service nova] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.413350] env[62914]: DEBUG oslo_concurrency.lockutils [req-3dde6c94-245a-459c-8e1d-354c9ee41765 req-911ae941-9671-48bd-ab6c-1e1808b954f1 service nova] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.413714] env[62914]: DEBUG nova.compute.manager [req-3dde6c94-245a-459c-8e1d-354c9ee41765 req-911ae941-9671-48bd-ab6c-1e1808b954f1 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] No waiting events found dispatching network-vif-unplugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1129.413859] env[62914]: WARNING nova.compute.manager [req-3dde6c94-245a-459c-8e1d-354c9ee41765 req-911ae941-9671-48bd-ab6c-1e1808b954f1 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received unexpected event network-vif-unplugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1129.490222] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1129.490367] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d1b393-861b-4fc3-b68f-d41278531b7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.501141] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1129.501445] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9584778b-6c8e-4772-8c37-405e4be90d2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.543191] env[62914]: DEBUG nova.compute.manager [req-2ae5c8f2-986a-4c5a-b108-647b89e5c3ee req-09e952e8-8c73-4477-aa68-a58a25980811 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Received event network-vif-deleted-371d22eb-8e46-423e-b4cc-a52d3dbc0879 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1129.543191] env[62914]: INFO nova.compute.manager [req-2ae5c8f2-986a-4c5a-b108-647b89e5c3ee req-09e952e8-8c73-4477-aa68-a58a25980811 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Neutron deleted interface 371d22eb-8e46-423e-b4cc-a52d3dbc0879; detaching it from the instance and deleting it from the info cache [ 1129.543191] env[62914]: DEBUG nova.network.neutron [req-2ae5c8f2-986a-4c5a-b108-647b89e5c3ee req-09e952e8-8c73-4477-aa68-a58a25980811 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.570867] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1129.571187] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1129.571456] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleting the datastore file [datastore2] e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.571764] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-17616f55-61ca-4e21-ae83-8a4467f8aa2a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.578609] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1129.578609] env[62914]: value = "task-4832708" [ 1129.578609] env[62914]: _type = "Task" [ 1129.578609] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.587842] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832708, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.685876] env[62914]: DEBUG nova.scheduler.client.report [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1129.738052] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: bd81fcb7-abef-4b86-8dce-f07b1c226f2f] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1129.836626] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1129.869409] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1129.869711] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1129.869916] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.870175] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1129.870454] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.870658] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1129.870920] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1129.871151] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1129.871406] env[62914]: DEBUG nova.virt.hardware [None 
req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1129.871641] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1129.871922] env[62914]: DEBUG nova.virt.hardware [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1129.872814] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4002d138-93e3-45db-98a0-31441a76a6f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.875906] env[62914]: DEBUG nova.compute.manager [None req-1c1e15da-5c31-4f0f-8755-f195f748a254 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1129.876727] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e0cd81-582f-4304-9071-f45cb7d6a552 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.887600] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac17f4a7-a93a-468c-961a-586c51f25b5b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.976640] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "4af05599-f754-4f81-bcbd-019d7ee58fc5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.976940] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "4af05599-f754-4f81-bcbd-019d7ee58fc5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.977174] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "4af05599-f754-4f81-bcbd-019d7ee58fc5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.977363] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "4af05599-f754-4f81-bcbd-019d7ee58fc5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.977578] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "4af05599-f754-4f81-bcbd-019d7ee58fc5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.979836] env[62914]: INFO nova.compute.manager [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Terminating instance [ 1129.981577] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "refresh_cache-4af05599-f754-4f81-bcbd-019d7ee58fc5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.981778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquired lock "refresh_cache-4af05599-f754-4f81-bcbd-019d7ee58fc5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.981903] env[62914]: DEBUG nova.network.neutron [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1129.996216] env[62914]: DEBUG nova.network.neutron [-] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.046048] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2e558b2-626c-4a07-a073-324879411381 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.055807] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06e1b13-e01d-4edd-8217-d29f58ab86f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.091508] env[62914]: DEBUG nova.compute.manager [req-2ae5c8f2-986a-4c5a-b108-647b89e5c3ee req-09e952e8-8c73-4477-aa68-a58a25980811 service nova] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Detach interface failed, port_id=371d22eb-8e46-423e-b4cc-a52d3dbc0879, reason: Instance 67ecc3a1-03b0-4881-b5c4-9c4fa244b292 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1130.100161] env[62914]: DEBUG oslo_vmware.api [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167997} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.100454] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.100714] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1130.100971] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1130.125319] env[62914]: INFO nova.scheduler.client.report [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted allocations for instance e730b472-fca8-4041-a00c-91bee25232f7 [ 1130.191803] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.192347] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1130.194897] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.897s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.196301] env[62914]: INFO nova.compute.claims [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.241112] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 397c5401-a435-4170-b07d-a03488c73867] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1130.320934] env[62914]: DEBUG nova.network.neutron [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Successfully updated port: 5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1130.396453] env[62914]: INFO nova.compute.manager [None req-1c1e15da-5c31-4f0f-8755-f195f748a254 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] instance snapshotting [ 1130.396884] env[62914]: DEBUG nova.objects.instance [None req-1c1e15da-5c31-4f0f-8755-f195f748a254 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lazy-loading 'flavor' on Instance uuid 4af05599-f754-4f81-bcbd-019d7ee58fc5 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.498833] env[62914]: INFO nova.compute.manager [-] [instance: 67ecc3a1-03b0-4881-b5c4-9c4fa244b292] Took 1.55 seconds to deallocate network for instance. [ 1130.501344] env[62914]: DEBUG nova.network.neutron [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1130.555274] env[62914]: DEBUG nova.network.neutron [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.629300] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.700964] env[62914]: DEBUG nova.compute.utils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1130.704302] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1130.704478] env[62914]: DEBUG nova.network.neutron [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1130.744042] env[62914]: DEBUG nova.policy [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '219e4e5bc6d94f6cb63cf1f289d2a6e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b19293a423174c20963c000441db100e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1130.745768] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 557c0538-fc4a-403a-a9cb-b706e2260b1c] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1130.823714] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.824064] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock 
"refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.824291] env[62914]: DEBUG nova.network.neutron [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1130.902711] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fb4f9c-2810-4837-a999-c14a80851f24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.921609] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4b5eb4-908a-442f-bc53-939a0db5945d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.008449] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.057847] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Releasing lock "refresh_cache-4af05599-f754-4f81-bcbd-019d7ee58fc5" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.058302] env[62914]: DEBUG nova.compute.manager [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1131.058736] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1131.059744] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e314065b-1c92-43c0-bf26-df3d8ded7eb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.068419] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1131.068540] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-defcd234-f6bb-4339-8ac0-f082b14e3091 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.071660] env[62914]: DEBUG nova.network.neutron [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Successfully created port: 7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1131.078379] env[62914]: DEBUG oslo_vmware.api [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1131.078379] env[62914]: value = "task-4832709" [ 1131.078379] env[62914]: _type = "Task" [ 1131.078379] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.090987] env[62914]: DEBUG oslo_vmware.api [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832709, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.169913] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1131.170315] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942083', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'name': 'volume-548d77fc-3693-4e6f-8097-f2402ca0f874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a', 'attached_at': '', 'detached_at': '', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'serial': '548d77fc-3693-4e6f-8097-f2402ca0f874'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1131.171272] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6798e73-f38d-490c-88bb-2b6b3e72de25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.200040] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab3f448-e762-463e-b0e6-ba3c9d1f0d2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.205826] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1131.236018] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-548d77fc-3693-4e6f-8097-f2402ca0f874/volume-548d77fc-3693-4e6f-8097-f2402ca0f874.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.238162] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d462eb5-6a67-4608-bb16-9581f7487b0f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.253734] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 4911baea-15df-46db-be11-fcf998eb0cb6] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1131.268282] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1131.268282] env[62914]: value = "task-4832710" [ 1131.268282] env[62914]: _type = "Task" [ 1131.268282] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.282800] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832710, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.375365] env[62914]: DEBUG nova.network.neutron [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1131.434749] env[62914]: DEBUG nova.compute.manager [None req-1c1e15da-5c31-4f0f-8755-f195f748a254 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Instance disappeared during snapshot {{(pid=62914) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1131.450017] env[62914]: DEBUG nova.compute.manager [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1131.450017] env[62914]: DEBUG nova.compute.manager [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing instance network info cache due to event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1131.450017] env[62914]: DEBUG oslo_concurrency.lockutils [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.450017] env[62914]: DEBUG oslo_concurrency.lockutils [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.450017] env[62914]: DEBUG nova.network.neutron [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1131.520234] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a83becd-56ef-42d1-ba6e-349c02a634cf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.525838] env[62914]: DEBUG nova.network.neutron [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [{"id": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "address": "fa:16:3e:5e:12:de", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e05c7fc-1e", "ovs_interfaceid": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.529557] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c3435f-2830-40b0-aa6b-03541e887512 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.568874] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1023df-5f1a-49be-b6ee-00832db6825a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.578756] env[62914]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16b9f09-0de7-47b6-9405-2a41e524a4f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.585248] env[62914]: DEBUG nova.compute.manager [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Received event network-vif-plugged-5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1131.585498] env[62914]: DEBUG oslo_concurrency.lockutils [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.585711] env[62914]: DEBUG oslo_concurrency.lockutils [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.585884] env[62914]: DEBUG oslo_concurrency.lockutils [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.586068] env[62914]: DEBUG nova.compute.manager [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] No waiting events found dispatching network-vif-plugged-5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1131.586242] env[62914]: WARNING nova.compute.manager [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Received unexpected event network-vif-plugged-5e05c7fc-1efe-4e76-b521-ac8bcee07403 for instance with vm_state building and task_state spawning. [ 1131.586408] env[62914]: DEBUG nova.compute.manager [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Received event network-changed-5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1131.586566] env[62914]: DEBUG nova.compute.manager [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Refreshing instance network info cache due to event network-changed-5e05c7fc-1efe-4e76-b521-ac8bcee07403. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1131.586736] env[62914]: DEBUG oslo_concurrency.lockutils [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] Acquiring lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.597474] env[62914]: DEBUG oslo_vmware.api [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832709, 'name': PowerOffVM_Task, 'duration_secs': 0.20009} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.605087] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1131.605248] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1131.606044] env[62914]: DEBUG nova.compute.provider_tree [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.608258] env[62914]: DEBUG nova.compute.manager [None req-1c1e15da-5c31-4f0f-8755-f195f748a254 tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Found 0 images (rotation: 2) {{(pid=62914) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1131.609451] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36d79c28-a7f3-423c-8efa-b3477202e885 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.636189] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.636478] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.636727] env[62914]: INFO nova.compute.manager [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Shelving [ 1131.639865] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1131.639865] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1131.639963] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Deleting the datastore file [datastore1] 4af05599-f754-4f81-bcbd-019d7ee58fc5 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1131.641313] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25677293-cbef-427e-82ad-8fe8f0e33851 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.647809] env[62914]: DEBUG oslo_vmware.api [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for the task: (returnval){ [ 1131.647809] env[62914]: value = "task-4832712" [ 1131.647809] env[62914]: _type = "Task" [ 1131.647809] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.656984] env[62914]: DEBUG oslo_vmware.api [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.760943] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 10102941-c31a-4ab1-be5a-801520d49fd7] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1131.783317] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832710, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.030252] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.030433] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Instance network_info: |[{"id": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "address": "fa:16:3e:5e:12:de", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e05c7fc-1e", "ovs_interfaceid": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1132.030818] env[62914]: DEBUG oslo_concurrency.lockutils [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] Acquired lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.031106] env[62914]: DEBUG nova.network.neutron [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Refreshing network info cache for port 5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1132.032586] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:12:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e05c7fc-1efe-4e76-b521-ac8bcee07403', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.040870] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] 
Creating folder: Project (1780142384594b1dabc6811b54144d56). Parent ref: group-v941773. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1132.042057] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32902617-b820-46be-a296-6f56e8aca442 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.058539] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Created folder: Project (1780142384594b1dabc6811b54144d56) in parent group-v941773. [ 1132.058539] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Creating folder: Instances. Parent ref: group-v942084. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1132.058872] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d63bac84-e9af-45dd-bfe9-dc6c63008651 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.069172] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Created folder: Instances in parent group-v942084. [ 1132.069452] env[62914]: DEBUG oslo.service.loopingcall [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.069697] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1132.070250] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adab0d94-fe5e-48a6-9808-24886518de20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.093847] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.093847] env[62914]: value = "task-4832715" [ 1132.093847] env[62914]: _type = "Task" [ 1132.093847] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.102524] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832715, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.113265] env[62914]: DEBUG nova.scheduler.client.report [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1132.147595] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1132.147894] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c10fde87-065e-4af5-b646-09e5c184142b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.166532] env[62914]: DEBUG oslo_vmware.api [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Task: {'id': task-4832712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174281} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.168089] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1132.168346] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1132.168540] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1132.168757] env[62914]: INFO nova.compute.manager [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1132.169040] env[62914]: DEBUG oslo.service.loopingcall [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.169367] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1132.169367] env[62914]: value = "task-4832716" [ 1132.169367] env[62914]: _type = "Task" [ 1132.169367] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.169588] env[62914]: DEBUG nova.compute.manager [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1132.169691] env[62914]: DEBUG nova.network.neutron [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1132.181034] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832716, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.191357] env[62914]: DEBUG nova.network.neutron [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1132.240247] env[62914]: DEBUG nova.network.neutron [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updated VIF entry in instance network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1132.240809] env[62914]: DEBUG nova.network.neutron [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.242976] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1132.265785] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1ec89a28-d4f3-4324-bf14-c99c5ce05950] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1132.274403] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1132.274403] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1132.274607] env[62914]: DEBUG nova.virt.hardware [None 
req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1132.274962] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1132.274962] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1132.275097] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1132.275356] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1132.275523] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1132.275696] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1132.275865] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1132.276052] env[62914]: DEBUG nova.virt.hardware [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1132.277516] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d45ac6-32ca-4fee-bb79-ff9483c81f36 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.294598] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c48ba7e-888a-4f47-856d-4044719c3fdf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.299453] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef 
tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832710, 'name': ReconfigVM_Task, 'duration_secs': 0.624514} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.299774] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-548d77fc-3693-4e6f-8097-f2402ca0f874/volume-548d77fc-3693-4e6f-8097-f2402ca0f874.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1132.306664] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c319725-8440-4603-be9c-a862123f9e4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.334701] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1132.334701] env[62914]: value = "task-4832717" [ 1132.334701] env[62914]: _type = "Task" [ 1132.334701] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.345692] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832717, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.606971] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832715, 'name': CreateVM_Task, 'duration_secs': 0.343647} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.607249] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1132.607989] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.608408] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.612423] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1132.612724] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6f21c63-4a3d-4758-9108-d3119962150b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.618194] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1132.618194] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c1979-6558-60e8-de1c-4b59d5424e3a" [ 1132.618194] env[62914]: _type = "Task" [ 1132.618194] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.623902] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.624542] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1132.627184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.998s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.628422] env[62914]: DEBUG nova.objects.instance [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'resources' on Instance uuid e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.634926] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526c1979-6558-60e8-de1c-4b59d5424e3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010044} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.635456] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.635540] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.635751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.636056] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.636336] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.637207] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee5cceb6-53c9-4881-9048-84d61f60c8d1 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.649088] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.649088] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1132.649088] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4aa8b46-b817-40a2-b50d-e414b042e2fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.657029] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1132.657029] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52594ab4-38c8-e816-330a-e192c4b41bae" [ 1132.657029] env[62914]: _type = "Task" [ 1132.657029] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.666388] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52594ab4-38c8-e816-330a-e192c4b41bae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.683562] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832716, 'name': PowerOffVM_Task, 'duration_secs': 0.246813} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.684015] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1132.687016] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904540de-d6ee-4bd3-8e81-41fb771b12f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.705250] env[62914]: DEBUG nova.network.neutron [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.707214] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8039d6-aa09-44d1-8bf2-f4cfa2c733c3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.746410] env[62914]: DEBUG oslo_concurrency.lockutils [req-88477761-29bb-4d5e-9e94-553a6daba4ba req-59e53baf-7079-4d43-b0f7-b62073be1757 service nova] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.770504] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: dc99b470-4334-408d-8853-d2e9b9204d04] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1132.847516] env[62914]: DEBUG oslo_vmware.api [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832717, 'name': ReconfigVM_Task, 'duration_secs': 0.198357} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.847847] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942083', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'name': 'volume-548d77fc-3693-4e6f-8097-f2402ca0f874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a', 'attached_at': '', 'detached_at': '', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'serial': '548d77fc-3693-4e6f-8097-f2402ca0f874'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1132.927545] env[62914]: DEBUG nova.network.neutron [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updated VIF entry in instance network info cache for port 5e05c7fc-1efe-4e76-b521-ac8bcee07403. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1132.928030] env[62914]: DEBUG nova.network.neutron [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [{"id": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "address": "fa:16:3e:5e:12:de", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e05c7fc-1e", "ovs_interfaceid": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.986035] env[62914]: DEBUG nova.network.neutron [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Successfully updated port: 7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1133.131384] env[62914]: DEBUG nova.objects.instance [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'numa_topology' on Instance uuid e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.133196] env[62914]: DEBUG nova.compute.utils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1133.134792] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1133.134944] env[62914]: DEBUG nova.network.neutron [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1133.168205] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52594ab4-38c8-e816-330a-e192c4b41bae, 'name': SearchDatastore_Task, 'duration_secs': 0.0127} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.169121] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a32e9553-22e9-42a6-8335-4506ccda71d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.175621] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1133.175621] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5238df9c-f2b4-d0d4-4427-9a06d66c90dc" [ 1133.175621] env[62914]: _type = "Task" [ 1133.175621] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.183824] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5238df9c-f2b4-d0d4-4427-9a06d66c90dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.185421] env[62914]: DEBUG nova.policy [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddc9958565c745e488dc7f3b34af9585', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4860bec4a28e4289b7a508f007fff452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1133.211488] env[62914]: INFO nova.compute.manager [-] [instance: 4af05599-f754-4f81-bcbd-019d7ee58fc5] Took 1.04 seconds to deallocate network for instance. 
[ 1133.218619] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Creating Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1133.219054] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-477bd747-ae05-4f35-b1c9-cf84a262e978 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.227304] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1133.227304] env[62914]: value = "task-4832718" [ 1133.227304] env[62914]: _type = "Task" [ 1133.227304] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.237405] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832718, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.274040] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 45644d9d-1d7d-4c2c-825d-fb3a2f6f2776] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1133.430532] env[62914]: DEBUG oslo_concurrency.lockutils [req-064a0719-eae8-4671-a4f2-7d7d126aca77 req-1fa60b16-cf25-4fe6-8f5c-b7a587607aa3 service nova] Releasing lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.449405] env[62914]: DEBUG nova.network.neutron [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Successfully created port: 2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1133.488347] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "refresh_cache-4d22f1d3-b37a-4356-a41c-516a19f78538" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.488451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "refresh_cache-4d22f1d3-b37a-4356-a41c-516a19f78538" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.488684] env[62914]: DEBUG nova.network.neutron [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Building network info cache for instance {{(pid=62914) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1133.612959] env[62914]: DEBUG nova.compute.manager [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Received event network-vif-plugged-7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1133.613233] env[62914]: DEBUG oslo_concurrency.lockutils [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] Acquiring lock "4d22f1d3-b37a-4356-a41c-516a19f78538-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.613456] env[62914]: DEBUG oslo_concurrency.lockutils [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.613629] env[62914]: DEBUG oslo_concurrency.lockutils [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.613804] env[62914]: DEBUG nova.compute.manager [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] No waiting events found dispatching network-vif-plugged-7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1133.613970] env[62914]: WARNING nova.compute.manager [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Received unexpected event network-vif-plugged-7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 for instance with vm_state building and task_state spawning. [ 1133.614156] env[62914]: DEBUG nova.compute.manager [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Received event network-changed-7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1133.614325] env[62914]: DEBUG nova.compute.manager [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Refreshing instance network info cache due to event network-changed-7028ef5c-ed0b-41c9-9ddb-2662c3a38b54. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1133.614489] env[62914]: DEBUG oslo_concurrency.lockutils [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] Acquiring lock "refresh_cache-4d22f1d3-b37a-4356-a41c-516a19f78538" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.633375] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.637031] env[62914]: DEBUG nova.objects.base [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1133.638399] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1133.689790] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5238df9c-f2b4-d0d4-4427-9a06d66c90dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009577} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.695650] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.696225] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 74e7896c-8a1f-448d-a44b-e6febfff9000/74e7896c-8a1f-448d-a44b-e6febfff9000.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1133.697470] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c2e936d-130e-4480-8ed3-3ca65d241f11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.707345] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1133.707345] env[62914]: value = "task-4832719" [ 1133.707345] env[62914]: _type = "Task" [ 1133.707345] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.719210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.722166] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832719, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.738095] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832718, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.777561] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: b77a3d27-fe9f-49fc-95d1-15fe82762833] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1133.891117] env[62914]: DEBUG nova.objects.instance [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'flavor' on Instance uuid f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.916838] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deeb465-e789-462b-90e9-0a2561a4f0e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.926283] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2c6a01-6ac1-4e3f-964f-d567b173cd1a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.960494] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271a9f0f-712f-4ece-b6db-5b6bad777507 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.971155] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57293ff2-1a3e-4463-9a31-126615c4af3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.990633] env[62914]: DEBUG nova.compute.provider_tree [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.026032] env[62914]: DEBUG nova.network.neutron [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1134.201545] env[62914]: DEBUG nova.network.neutron [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Updating instance_info_cache with network_info: [{"id": "7028ef5c-ed0b-41c9-9ddb-2662c3a38b54", "address": "fa:16:3e:72:f2:73", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7028ef5c-ed", "ovs_interfaceid": "7028ef5c-ed0b-41c9-9ddb-2662c3a38b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.224296] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832719, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.238825] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832718, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.282063] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: baf28ebf-3ab8-465c-a13b-705ccf3510dc] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1134.397532] env[62914]: DEBUG oslo_concurrency.lockutils [None req-956dc502-e52f-48f4-a65d-d1d7ba177bef tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.846s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.498508] env[62914]: DEBUG nova.scheduler.client.report [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1134.647460] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1134.673153] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1134.673480] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1134.674133] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1134.674133] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1134.674133] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1134.674366] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1134.674503] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1134.675355] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1134.675355] env[62914]: DEBUG nova.virt.hardware [None 
req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1134.675355] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1134.675355] env[62914]: DEBUG nova.virt.hardware [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1134.676077] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d664f7-8da1-4604-a581-e72976a93251 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.685686] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4d7152-2047-4a8f-bf4b-29c3c0c88ef8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.704032] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "refresh_cache-4d22f1d3-b37a-4356-a41c-516a19f78538" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.704366] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Instance network_info: |[{"id": "7028ef5c-ed0b-41c9-9ddb-2662c3a38b54", "address": "fa:16:3e:72:f2:73", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7028ef5c-ed", "ovs_interfaceid": "7028ef5c-ed0b-41c9-9ddb-2662c3a38b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1134.704767] env[62914]: DEBUG oslo_concurrency.lockutils [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] Acquired 
lock "refresh_cache-4d22f1d3-b37a-4356-a41c-516a19f78538" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.704953] env[62914]: DEBUG nova.network.neutron [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Refreshing network info cache for port 7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1134.706249] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:f2:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7028ef5c-ed0b-41c9-9ddb-2662c3a38b54', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1134.714868] env[62914]: DEBUG oslo.service.loopingcall [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1134.715472] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1134.719291] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2a2cc76-1b6b-44ba-bba0-f0249eec3c7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.743411] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832719, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548197} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.745319] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 74e7896c-8a1f-448d-a44b-e6febfff9000/74e7896c-8a1f-448d-a44b-e6febfff9000.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1134.745615] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1134.745885] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1134.745885] env[62914]: value = "task-4832720" [ 1134.745885] env[62914]: _type = "Task" [ 1134.745885] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.746529] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9686e074-f823-418d-97c1-60bf5da1d2c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.755760] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832718, 'name': CreateSnapshot_Task, 'duration_secs': 1.205565} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.760191] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Created Snapshot of the VM instance {{(pid=62914) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1134.760493] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832720, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.761143] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1134.761143] env[62914]: value = "task-4832721" [ 1134.761143] env[62914]: _type = "Task" [ 1134.761143] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.761869] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f78144-3ccd-42e3-af68-a61885cf6f64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.785035] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: b477cd62-49c2-4e3c-98ea-b4154dda4986] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1134.788301] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832721, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.885413] env[62914]: DEBUG nova.compute.manager [req-671f9a3e-37c0-4048-8414-40315cf50a8b req-c1a260de-fa9b-419b-9578-ccfd460be754 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Received event network-vif-plugged-2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1134.885729] env[62914]: DEBUG oslo_concurrency.lockutils [req-671f9a3e-37c0-4048-8414-40315cf50a8b req-c1a260de-fa9b-419b-9578-ccfd460be754 service nova] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.886048] env[62914]: DEBUG oslo_concurrency.lockutils [req-671f9a3e-37c0-4048-8414-40315cf50a8b req-c1a260de-fa9b-419b-9578-ccfd460be754 service nova] Lock "9673614c-44c9-4348-b528-0bd28c892a11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.886298] env[62914]: DEBUG oslo_concurrency.lockutils [req-671f9a3e-37c0-4048-8414-40315cf50a8b req-c1a260de-fa9b-419b-9578-ccfd460be754 service nova] Lock "9673614c-44c9-4348-b528-0bd28c892a11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.886532] env[62914]: DEBUG nova.compute.manager [req-671f9a3e-37c0-4048-8414-40315cf50a8b req-c1a260de-fa9b-419b-9578-ccfd460be754 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] No waiting events found dispatching network-vif-plugged-2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1134.886822] env[62914]: WARNING nova.compute.manager [req-671f9a3e-37c0-4048-8414-40315cf50a8b req-c1a260de-fa9b-419b-9578-ccfd460be754 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Received unexpected event network-vif-plugged-2c6def1a-051c-4671-bee1-4eeefcd24ae3 for instance with vm_state building and task_state spawning. 
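Note: the recurring "Waiting for the task" / "progress is N%" / "completed successfully" records (SearchDatastore_Task, CreateSnapshot_Task, CopyVirtualDisk_Task, CreateVM_Task) all follow the same poll-until-terminal pattern. A simplified, illustrative sketch of that loop is given below; it is not the oslo.vmware implementation, and it assumes a caller-supplied poll_fn that returns a dict with 'state' and optional 'error' keys.

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    # Generic polling loop in the spirit of the wait_for_task/_poll_task
    # records above. poll_fn() is assumed to return something like
    # {'state': 'running'|'success'|'error', 'progress': int, ...}.
    deadline = time.monotonic() + timeout
    while True:
        info = poll_fn()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"task failed: {info.get('error')}")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)

In the log, each cycle of such a loop shows up as a "_poll_task ... progress is N%" record, and the terminal poll is logged as "completed successfully" together with a duration_secs value.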
[ 1135.004021] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.377s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.007297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.999s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.007553] env[62914]: DEBUG nova.objects.instance [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'resources' on Instance uuid 67ecc3a1-03b0-4881-b5c4-9c4fa244b292 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.055289] env[62914]: INFO nova.compute.manager [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Rescuing [ 1135.055642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.055833] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.056032] env[62914]: DEBUG nova.network.neutron [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1135.258422] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832720, 'name': CreateVM_Task, 'duration_secs': 0.347723} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.258603] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1135.259305] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.259478] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.259798] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1135.260073] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56c87699-caef-462a-9273-b8f5d6c473c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.265137] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1135.265137] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b995c8-f822-ceb9-e8f5-09017963819b" [ 1135.265137] env[62914]: _type = "Task" [ 1135.265137] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.276667] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b995c8-f822-ceb9-e8f5-09017963819b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.279374] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074553} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.279623] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.280369] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e189f71b-874b-4262-9038-a56720d49e9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.296347] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1d74504f-b641-42c6-a420-c80614d69b23] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1135.305202] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Creating linked-clone VM from snapshot {{(pid=62914) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1135.314542] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 74e7896c-8a1f-448d-a44b-e6febfff9000/74e7896c-8a1f-448d-a44b-e6febfff9000.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.317778] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a7dcdd09-0805-4690-b603-3f7ec2f6f3bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.320754] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cd01b1c-c025-4489-a915-17c07760a9e8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.341723] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1135.341723] env[62914]: value = "task-4832723" [ 1135.341723] env[62914]: _type = "Task" [ 1135.341723] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.343153] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1135.343153] env[62914]: value = "task-4832722" [ 1135.343153] env[62914]: _type = "Task" [ 1135.343153] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.355103] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832722, 'name': CloneVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.358285] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832723, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.456428] env[62914]: DEBUG nova.network.neutron [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Successfully updated port: 2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1135.489060] env[62914]: DEBUG nova.network.neutron [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Updated VIF entry in instance network info cache for port 7028ef5c-ed0b-41c9-9ddb-2662c3a38b54. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1135.489632] env[62914]: DEBUG nova.network.neutron [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Updating instance_info_cache with network_info: [{"id": "7028ef5c-ed0b-41c9-9ddb-2662c3a38b54", "address": "fa:16:3e:72:f2:73", "network": {"id": "a32e9cb7-041b-4715-8dbe-76f60bdac205", "bridge": "br-int", "label": "tempest-ServersTestJSON-1220500586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b19293a423174c20963c000441db100e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7028ef5c-ed", "ovs_interfaceid": "7028ef5c-ed0b-41c9-9ddb-2662c3a38b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.519769] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e22cdccc-1c04-4a1d-b6a8-8a71f11c7552 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.942s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.521253] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.888s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.521594] env[62914]: INFO nova.compute.manager [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Unshelving [ 1135.643066] env[62914]: DEBUG nova.compute.manager [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Received event network-changed-2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1135.643454] env[62914]: DEBUG nova.compute.manager [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Refreshing instance network info cache due to event network-changed-2c6def1a-051c-4671-bee1-4eeefcd24ae3. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1135.643774] env[62914]: DEBUG oslo_concurrency.lockutils [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] Acquiring lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.643974] env[62914]: DEBUG oslo_concurrency.lockutils [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] Acquired lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.644272] env[62914]: DEBUG nova.network.neutron [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Refreshing network info cache for port 2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1135.787242] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52b995c8-f822-ceb9-e8f5-09017963819b, 'name': SearchDatastore_Task, 'duration_secs': 0.011313} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.789071] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.789404] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1135.789670] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.789851] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.790086] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1135.791051] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187d9dc8-6d50-4f88-9772-a5686ca29d71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.794427] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a32c691d-b79f-4cc2-95ed-8a3228af1c12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.802919] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed50d19-494c-4d82-bf13-422be209f048 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.807355] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1135.807549] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1135.808762] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dea0378e-35a9-4ee6-ad60-d3b40eb91aac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.836055] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 1fb67ac1-c0b7-48b9-8562-d457d46709bc] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1135.841826] env[62914]: DEBUG nova.network.neutron [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.845814] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd33cb51-863d-476c-9357-6c3aba83531e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.854293] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1135.854293] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ece34f-831a-f2fb-8732-fc94c69930d2" [ 1135.854293] env[62914]: _type = "Task" [ 1135.854293] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.869209] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832723, 'name': ReconfigVM_Task, 'duration_secs': 0.303598} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.870816] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9e1da4-63f9-4cdd-a325-369be2113750 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.876536] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 74e7896c-8a1f-448d-a44b-e6febfff9000/74e7896c-8a1f-448d-a44b-e6febfff9000.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.883368] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd6eeec3-86a0-4a0e-8842-04278ac313eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.885012] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ece34f-831a-f2fb-8732-fc94c69930d2, 'name': SearchDatastore_Task, 'duration_secs': 0.014076} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.885246] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832722, 'name': CloneVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.886872] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf1b9909-08b6-4d28-8a70-52bffb1fb1d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.897535] env[62914]: DEBUG nova.compute.provider_tree [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.900659] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1135.900659] env[62914]: value = "task-4832724" [ 1135.900659] env[62914]: _type = "Task" [ 1135.900659] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.906429] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1135.906429] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525c448a-d119-0c81-10fa-f3c2f24be80d" [ 1135.906429] env[62914]: _type = "Task" [ 1135.906429] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.913779] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832724, 'name': Rename_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.918963] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525c448a-d119-0c81-10fa-f3c2f24be80d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.958936] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.992880] env[62914]: DEBUG oslo_concurrency.lockutils [req-ee614c7c-aecf-4c88-b642-3508b3f76d5a req-1ae36ba9-0dd2-4570-bf39-addd9d287fbd service nova] Releasing lock "refresh_cache-4d22f1d3-b37a-4356-a41c-516a19f78538" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.186785] env[62914]: DEBUG nova.network.neutron [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1136.274533] env[62914]: DEBUG nova.network.neutron [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.342156] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 2d48056c-d38f-4be1-b28b-71da14607870] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1136.350443] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.363048] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832722, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.402452] env[62914]: DEBUG nova.scheduler.client.report [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1136.421755] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832724, 'name': Rename_Task, 'duration_secs': 0.161666} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.422199] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525c448a-d119-0c81-10fa-f3c2f24be80d, 'name': SearchDatastore_Task, 'duration_secs': 0.017431} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.422468] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1136.422745] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.422991] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4d22f1d3-b37a-4356-a41c-516a19f78538/4d22f1d3-b37a-4356-a41c-516a19f78538.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1136.423276] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a52839da-0d7b-4525-9d2a-39b185638fa8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.424885] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9250de67-f5f4-4a39-ae3c-afb250c7f521 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1136.435904] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1136.435904] env[62914]: value = "task-4832726" [ 1136.435904] env[62914]: _type = "Task" [ 1136.435904] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.437277] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1136.437277] env[62914]: value = "task-4832725" [ 1136.437277] env[62914]: _type = "Task" [ 1136.437277] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.456784] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.460771] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832725, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.556145] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.778570] env[62914]: DEBUG oslo_concurrency.lockutils [req-148413b4-4ee6-4b8f-b43a-466336b1aa53 req-28399e1e-860c-4ae8-b4ca-016dd8c87152 service nova] Releasing lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.779110] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.779390] env[62914]: DEBUG nova.network.neutron [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1136.845849] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 7d8287f9-10be-4834-8b7a-1b764145d1c3] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1136.866806] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 
tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832722, 'name': CloneVM_Task, 'duration_secs': 1.451019} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.867195] env[62914]: INFO nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Created linked-clone VM from snapshot [ 1136.868199] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdbb97e-3d4f-4cdf-af4b-0295aa67dabe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.877952] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Uploading image 0c7a66ae-a3fb-406e-9993-5953aff2f722 {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1136.891306] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1136.891534] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-337edf62-6be5-4270-8882-957be74385e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.904577] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1136.904577] env[62914]: value = "task-4832727" [ 1136.904577] env[62914]: _type = "Task" [ 1136.904577] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.912932] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1136.912932] env[62914]: value = "vm-942089" [ 1136.912932] env[62914]: _type = "VirtualMachine" [ 1136.912932] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1136.914239] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.916885] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a03cb629-43c8-4d2e-94c2-0ca8f1517bf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.923216] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.205s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.923535] env[62914]: DEBUG nova.objects.instance [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lazy-loading 'resources' on Instance uuid 4af05599-f754-4f81-bcbd-019d7ee58fc5 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.925022] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832727, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.932708] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease: (returnval){ [ 1136.932708] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ffceaf-7c58-3e9c-c071-81b72573e4ef" [ 1136.932708] env[62914]: _type = "HttpNfcLease" [ 1136.932708] env[62914]: } obtained for exporting VM: (result){ [ 1136.932708] env[62914]: value = "vm-942089" [ 1136.932708] env[62914]: _type = "VirtualMachine" [ 1136.932708] env[62914]: }. {{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1136.933396] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the lease: (returnval){ [ 1136.933396] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ffceaf-7c58-3e9c-c071-81b72573e4ef" [ 1136.933396] env[62914]: _type = "HttpNfcLease" [ 1136.933396] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1136.943985] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1136.943985] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ffceaf-7c58-3e9c-c071-81b72573e4ef" [ 1136.943985] env[62914]: _type = "HttpNfcLease" [ 1136.943985] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1136.954909] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832726, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.958851] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832725, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.960016] env[62914]: INFO nova.scheduler.client.report [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted allocations for instance 67ecc3a1-03b0-4881-b5c4-9c4fa244b292 [ 1137.320104] env[62914]: DEBUG nova.network.neutron [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1137.349445] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 29a177e4-b5d7-4249-8fc5-2316f6891536] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1137.415700] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832727, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.448656] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1137.448656] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ffceaf-7c58-3e9c-c071-81b72573e4ef" [ 1137.448656] env[62914]: _type = "HttpNfcLease" [ 1137.448656] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1137.449378] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548745} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.455788] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1137.455788] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ffceaf-7c58-3e9c-c071-81b72573e4ef" [ 1137.455788] env[62914]: _type = "HttpNfcLease" [ 1137.455788] env[62914]: }. 
{{(pid=62914) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1137.456024] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4d22f1d3-b37a-4356-a41c-516a19f78538/4d22f1d3-b37a-4356-a41c-516a19f78538.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1137.456253] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1137.457196] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac57cc64-a3d7-4d50-b137-48e552f616d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.461781] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9787cf29-40ce-4114-a3a9-5ab50f8106b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.478514] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5183d-774c-24c8-7841-013dc2387ff0/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1137.478711] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5183d-774c-24c8-7841-013dc2387ff0/disk-0.vmdk for reading. {{(pid=62914) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1137.480183] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1137.480183] env[62914]: value = "task-4832729" [ 1137.480183] env[62914]: _type = "Task" [ 1137.480183] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.480416] env[62914]: DEBUG oslo_vmware.api [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832725, 'name': PowerOnVM_Task, 'duration_secs': 0.556945} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.481117] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69130e06-ba06-4492-97fc-f06c1419a294 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "67ecc3a1-03b0-4881-b5c4-9c4fa244b292" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.211s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.484562] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1137.484740] env[62914]: INFO nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1137.484923] env[62914]: DEBUG nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1137.489689] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5a5263-6dc3-4e36-8cd8-9527b63c79d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.550106] env[62914]: DEBUG nova.network.neutron [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [{"id": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "address": "fa:16:3e:d3:1d:70", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6def1a-05", "ovs_interfaceid": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.557727] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 
tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.593113] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e0d71866-4043-43a7-a42f-2b34d5fddf14 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.744769] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78608a7-b9fe-4c18-9300-5a94f4702bda {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.753031] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea74e06a-a9de-4919-a831-68298bc43f68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.789692] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7603b77-064b-4bfe-a02b-68c3ad507d84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.798334] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dab660-ac18-4863-a394-283a131381be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.817756] env[62914]: DEBUG nova.compute.provider_tree [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.852640] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 76dfbf82-0ed0-4621-890c-060b187b47e0] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1137.916086] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832727, 'name': PowerOffVM_Task, 'duration_secs': 0.714067} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.916421] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1137.917320] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e3a340-82ed-4a84-b0e3-a29dbe67e47d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.941805] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6458df2-264e-48b1-b3f3-ed8307ef230c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.979490] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1137.979874] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-149aff69-0301-4854-a20b-ac875e1fc6c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.990066] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1137.990066] env[62914]: value = "task-4832730" [ 1137.990066] env[62914]: _type = "Task" [ 1137.990066] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.997929] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144345} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.999030] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1138.000578] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d20265c-a28f-42fa-bc00-54b4fb4dd02c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.009489] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1138.009881] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1138.010494] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.010634] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.010923] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1138.022824] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a55aadc5-7dd7-442f-8fc3-54377aff1b2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.035465] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 4d22f1d3-b37a-4356-a41c-516a19f78538/4d22f1d3-b37a-4356-a41c-516a19f78538.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.035986] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09a9b6a3-5203-4232-b3f5-d60cdb67d03e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.054889] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.055566] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Instance network_info: |[{"id": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "address": "fa:16:3e:d3:1d:70", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6def1a-05", "ovs_interfaceid": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1138.056334] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:1d:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c6def1a-051c-4671-bee1-4eeefcd24ae3', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.065543] env[62914]: DEBUG oslo.service.loopingcall [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.068521] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1138.073611] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1138.073802] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1138.074805] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1138.074805] env[62914]: value = "task-4832731" [ 1138.074805] env[62914]: _type = "Task" [ 1138.074805] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.076796] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9eb2031f-2b2f-43ea-b743-aec86263854b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.096859] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-133e65f2-4e08-488e-aebd-5440e2a8edd2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.100522] env[62914]: INFO nova.compute.manager [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Took 13.84 seconds to build instance. [ 1138.115619] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1138.115619] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ead39-61cb-1f52-5de3-e09b6f6e94f7" [ 1138.115619] env[62914]: _type = "Task" [ 1138.115619] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.116094] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832731, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.117670] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.117670] env[62914]: value = "task-4832732" [ 1138.117670] env[62914]: _type = "Task" [ 1138.117670] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.133664] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832732, 'name': CreateVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.137322] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ead39-61cb-1f52-5de3-e09b6f6e94f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.146675] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.146960] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.324591] env[62914]: DEBUG nova.scheduler.client.report [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1138.357688] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 4cea2bd1-a238-4fb6-bc47-719894461228] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1138.606705] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5426d149-aeb5-47a4-af00-46406e83ea3b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.358s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.612274] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832731, 'name': ReconfigVM_Task, 'duration_secs': 0.532689} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.612730] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 4d22f1d3-b37a-4356-a41c-516a19f78538/4d22f1d3-b37a-4356-a41c-516a19f78538.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1138.613601] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37ecdb99-2209-477f-84c2-87379d12d67d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.624711] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1138.624711] env[62914]: value = "task-4832733" [ 1138.624711] env[62914]: _type = "Task" [ 1138.624711] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.637262] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522ead39-61cb-1f52-5de3-e09b6f6e94f7, 'name': SearchDatastore_Task, 'duration_secs': 0.031742} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.638384] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7038c6b8-face-41eb-bb48-0ff37bd7d2df {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.647839] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832733, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.648607] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832732, 'name': CreateVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.651084] env[62914]: INFO nova.compute.manager [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Detaching volume 1781e247-2b2e-4cd0-b9da-898a7d3844dd [ 1138.655055] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1138.655055] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52871b27-58c4-2f18-91a7-643240ce5060" [ 1138.655055] env[62914]: _type = "Task" [ 1138.655055] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.665290] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52871b27-58c4-2f18-91a7-643240ce5060, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.720116] env[62914]: INFO nova.virt.block_device [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Attempting to driver detach volume 1781e247-2b2e-4cd0-b9da-898a7d3844dd from mountpoint /dev/sdb [ 1138.720512] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1138.720778] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942044', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'name': 'volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '06e8b438-01ef-481f-8e27-2faa01bb97aa', 'attached_at': '', 'detached_at': '', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'serial': '1781e247-2b2e-4cd0-b9da-898a7d3844dd'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1138.724022] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7b0546-2663-4658-b298-7d8a37de9a03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.750280] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7198fa14-34ca-421e-ac54-f04cf2784aec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.758908] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451217b4-ab4e-4268-8a19-64205541aede {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.783868] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f195fa-f102-416f-8943-1514a95935f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.804894] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd/volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1138.811340] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1138.811882] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a02f1e0-d5d3-4444-b0fa-f73382166a04 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.835987] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.913s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.838837] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1138.838837] env[62914]: value = "task-4832734" [ 1138.838837] env[62914]: _type = "Task" [ 1138.838837] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.839701] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.284s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.841424] env[62914]: DEBUG nova.objects.instance [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'pci_requests' on Instance uuid e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.854017] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832734, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.861458] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 6bdcd778-0942-41e7-a6fb-7c3413d34ef7] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1138.865286] env[62914]: INFO nova.scheduler.client.report [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Deleted allocations for instance 4af05599-f754-4f81-bcbd-019d7ee58fc5 [ 1139.009232] env[62914]: DEBUG nova.compute.manager [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Received event network-changed-5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1139.009447] env[62914]: DEBUG nova.compute.manager [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Refreshing instance network info cache due to event network-changed-5e05c7fc-1efe-4e76-b521-ac8bcee07403. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1139.009963] env[62914]: DEBUG oslo_concurrency.lockutils [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] Acquiring lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.010182] env[62914]: DEBUG oslo_concurrency.lockutils [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] Acquired lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.010518] env[62914]: DEBUG nova.network.neutron [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Refreshing network info cache for port 5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1139.136459] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832732, 'name': CreateVM_Task, 'duration_secs': 0.647695} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.137167] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1139.137956] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.138272] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.138966] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1139.143095] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70d0659c-2a9c-4910-b3f1-38691e3ef432 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.145217] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832733, 'name': Rename_Task, 'duration_secs': 0.255655} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.146082] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1139.146698] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3bc36dd-6d4c-4ea9-b331-4cac1bdffc12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.150198] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1139.150198] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e8cc37-b7a6-7b52-5fa1-4d11943d3ee2" [ 1139.150198] env[62914]: _type = "Task" [ 1139.150198] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.155583] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1139.155583] env[62914]: value = "task-4832735" [ 1139.155583] env[62914]: _type = "Task" [ 1139.155583] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.166178] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e8cc37-b7a6-7b52-5fa1-4d11943d3ee2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.175615] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52871b27-58c4-2f18-91a7-643240ce5060, 'name': SearchDatastore_Task, 'duration_secs': 0.028715} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.175873] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.176179] env[62914]: DEBUG oslo_concurrency.lockutils [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.176449] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. {{(pid=62914) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1139.176736] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83751586-b0c6-473b-bb16-f9a765cfb44c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.185255] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1139.185255] env[62914]: value = "task-4832736" [ 1139.185255] env[62914]: _type = "Task" [ 1139.185255] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.195696] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832736, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.350274] env[62914]: DEBUG nova.objects.instance [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'numa_topology' on Instance uuid e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.362891] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.367465] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 2f7bc586-af68-4d9d-81e2-8247371dfa7f] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1139.374495] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13ac2162-552e-4f1f-9a94-8e7b24f78d7b tempest-ServersAaction247Test-1001896833 tempest-ServersAaction247Test-1001896833-project-member] Lock "4af05599-f754-4f81-bcbd-019d7ee58fc5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.397s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.678020] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832735, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.678362] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52e8cc37-b7a6-7b52-5fa1-4d11943d3ee2, 'name': SearchDatastore_Task, 'duration_secs': 0.023864} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.678696] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.678973] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.679287] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.679455] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.679762] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.680018] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eaf362a-44c3-4e65-80c9-3ab889d9051d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.698489] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832736, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.701591] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.702072] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1139.703389] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90d7e87f-1924-4b64-9b47-41c1f0ca122b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.713587] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1139.713587] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528942fc-150f-e40e-10cd-86d5afd4998b" [ 1139.713587] env[62914]: _type = "Task" [ 1139.713587] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.727826] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528942fc-150f-e40e-10cd-86d5afd4998b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.860345] env[62914]: INFO nova.compute.claims [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1139.863602] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.875284] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: aede8da7-8bf2-4963-b08b-6e06007614a5] Instance has had 0 of 5 cleanup attempts {{(pid=62914) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1139.953216] env[62914]: DEBUG nova.network.neutron [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updated VIF entry in instance network info cache for port 5e05c7fc-1efe-4e76-b521-ac8bcee07403. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1139.953924] env[62914]: DEBUG nova.network.neutron [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [{"id": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "address": "fa:16:3e:5e:12:de", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e05c7fc-1e", "ovs_interfaceid": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.170222] env[62914]: DEBUG oslo_vmware.api [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832735, 'name': PowerOnVM_Task, 'duration_secs': 0.851197} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.171250] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1140.172160] env[62914]: INFO nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Took 7.93 seconds to spawn the instance on the hypervisor. 
[ 1140.172160] env[62914]: DEBUG nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1140.173542] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78c386e-10db-4576-8d94-e2f12fc09113 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.203834] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.958705} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.203834] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk. [ 1140.204862] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd4e10a-8f3d-4bdc-8a08-194c03556a88 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.253295] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.258880] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1b39cff-4c97-48e1-9be9-fdf4e015a4a9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.288959] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528942fc-150f-e40e-10cd-86d5afd4998b, 'name': SearchDatastore_Task, 'duration_secs': 0.068592} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.291296] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1140.291296] env[62914]: value = "task-4832737" [ 1140.291296] env[62914]: _type = "Task" [ 1140.291296] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.291634] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cb5e3ce-5b25-49a9-b782-f8dabb992fe7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.302645] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1140.302645] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523422b2-b610-964b-ca82-495ceb1a58b6" [ 1140.302645] env[62914]: _type = "Task" [ 1140.302645] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.305872] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832737, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.315744] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523422b2-b610-964b-ca82-495ceb1a58b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.358190] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832734, 'name': ReconfigVM_Task, 'duration_secs': 1.321666} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.358544] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1140.363694] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e474de4a-d5b9-4610-bb3d-2c331524a5f2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.379185] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.379438] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Cleaning up deleted instances with incomplete migration {{(pid=62914) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11342}} [ 1140.384705] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1140.384705] env[62914]: value = "task-4832738" [ 1140.384705] env[62914]: _type = "Task" [ 1140.384705] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.398287] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832738, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.456892] env[62914]: DEBUG oslo_concurrency.lockutils [req-72003d77-5ab5-4a30-a05c-fb888e4b05bd req-be1fb5b5-51ed-48fb-8f9d-e0eb9fb10fb8 service nova] Releasing lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.696214] env[62914]: INFO nova.compute.manager [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Took 16.02 seconds to build instance. [ 1140.806576] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832737, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.817271] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523422b2-b610-964b-ca82-495ceb1a58b6, 'name': SearchDatastore_Task, 'duration_secs': 0.026168} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.817636] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.817924] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11/9673614c-44c9-4348-b528-0bd28c892a11.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1140.818281] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-687eda37-b271-486a-bed0-dbae2aaa4a11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.829767] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1140.829767] env[62914]: value = "task-4832739" [ 1140.829767] env[62914]: _type = "Task" [ 1140.829767] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.841561] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.881939] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.897434] env[62914]: DEBUG oslo_vmware.api [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832738, 'name': ReconfigVM_Task, 'duration_secs': 0.23252} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.898992] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942044', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'name': 'volume-1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '06e8b438-01ef-481f-8e27-2faa01bb97aa', 'attached_at': '', 'detached_at': '', 'volume_id': '1781e247-2b2e-4cd0-b9da-898a7d3844dd', 'serial': '1781e247-2b2e-4cd0-b9da-898a7d3844dd'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1141.122959] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d79342-6751-4e87-9551-35401adad455 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.131730] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d82d35f-edb1-47dc-9948-ebb7d80f58d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.164136] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b241136b-c473-4a06-a950-bf727825ad4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.173510] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc5aa33-b81d-4ff8-a0e8-010375240b96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.189496] env[62914]: DEBUG nova.compute.provider_tree [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.198778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7d09592d-7b31-4713-9ed1-3fc97cace7ab tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.532s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.308169] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832737, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.343185] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832739, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.457860] env[62914]: DEBUG nova.objects.instance [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'flavor' on Instance uuid 06e8b438-01ef-481f-8e27-2faa01bb97aa {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.693038] env[62914]: DEBUG nova.scheduler.client.report [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1141.789433] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "4d22f1d3-b37a-4356-a41c-516a19f78538" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.789433] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.789433] env[62914]: DEBUG nova.compute.manager [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1141.790329] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8f7d43-55ad-4193-b0d2-db67a0c877ea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.802513] env[62914]: DEBUG nova.compute.manager [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1141.803240] env[62914]: DEBUG nova.objects.instance [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'flavor' on Instance uuid 4d22f1d3-b37a-4356-a41c-516a19f78538 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.812688] env[62914]: DEBUG oslo_vmware.api [None 
req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832737, 'name': ReconfigVM_Task, 'duration_secs': 1.055284} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.812992] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfigured VM instance instance-00000069 to attach disk [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a/75c43660-b52b-450e-ba36-0f721e14bc6c-rescue.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.814017] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f62403-557a-42da-b796-73b27e8f25e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.850626] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a39a403-335d-4fbb-956e-281b7c806485 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.868968] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832739, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.872192] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1141.872192] env[62914]: value = "task-4832740" [ 1141.872192] env[62914]: _type = "Task" [ 1141.872192] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.881751] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832740, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.198354] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.358s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.235107] env[62914]: INFO nova.network.neutron [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating port 8d6d259f-1ebc-4e49-b6f8-114f414606f7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1142.308767] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1142.309277] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95969b1b-544a-4226-bb14-c080eda72f7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.317779] env[62914]: DEBUG oslo_vmware.api [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1142.317779] env[62914]: value = "task-4832741" [ 1142.317779] env[62914]: _type = "Task" [ 1142.317779] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.328692] env[62914]: DEBUG oslo_vmware.api [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832741, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.351664] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832739, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.114218} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.351981] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11/9673614c-44c9-4348-b528-0bd28c892a11.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1142.352226] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.352529] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc552c68-bc5f-43b9-a1d0-3d4a200bcc22 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.360836] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1142.360836] env[62914]: value = "task-4832742" [ 1142.360836] env[62914]: _type = "Task" [ 1142.360836] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.372424] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832742, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.382708] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832740, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.384984] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.385202] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.468029] env[62914]: DEBUG oslo_concurrency.lockutils [None req-ffd22302-fea5-4225-a9ae-d9420babc74e tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.320s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.828057] env[62914]: DEBUG oslo_vmware.api [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832741, 'name': PowerOffVM_Task, 'duration_secs': 0.228272} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.828438] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1142.828673] env[62914]: DEBUG nova.compute.manager [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1142.829531] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306fb2bf-96d6-424f-a15a-5c36a2e61801 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.871410] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832742, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138545} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.871763] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.872628] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152045c6-62ff-499c-902b-e7174214ca35 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.886094] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832740, 'name': ReconfigVM_Task, 'duration_secs': 0.595511} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.895597] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1142.915156] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11/9673614c-44c9-4348-b528-0bd28c892a11.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.915891] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9722e4b0-3e49-4a86-a94e-90f1bbd341eb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.918127] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92c4df63-15da-4847-bfda-a36069881c7b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.933604] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.933794] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 1142.945861] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1142.945861] env[62914]: value = "task-4832743" [ 1142.945861] env[62914]: _type = "Task" [ 1142.945861] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.947680] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1142.947680] env[62914]: value = "task-4832744" [ 1142.947680] env[62914]: _type = "Task" [ 1142.947680] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.961419] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832743, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.965436] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832744, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.342690] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7344c56c-78be-4009-8a22-123d5da0dc3e tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.465051] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832743, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.465347] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832744, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.610134] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.610505] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.610720] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "06e8b438-01ef-481f-8e27-2faa01bb97aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.610992] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.611225] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.616142] env[62914]: INFO nova.compute.manager [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Terminating instance [ 1143.618652] env[62914]: DEBUG nova.compute.manager [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1143.619032] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1143.619828] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec41ae8-3a0b-4a63-a740-202cceb92463 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.628211] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1143.628589] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1807b0f4-c55c-4ef8-bf3d-85877cdfc794 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.637301] env[62914]: DEBUG oslo_vmware.api [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1143.637301] env[62914]: value = "task-4832745" [ 1143.637301] env[62914]: _type = "Task" [ 1143.637301] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.651908] env[62914]: DEBUG oslo_vmware.api [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832745, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.755404] env[62914]: DEBUG nova.compute.manager [req-5777e1b9-198a-4352-a027-f94cbe3c2181 req-6d7f3899-0f08-4f4e-aadd-82806bb22369 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-vif-plugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1143.755502] env[62914]: DEBUG oslo_concurrency.lockutils [req-5777e1b9-198a-4352-a027-f94cbe3c2181 req-6d7f3899-0f08-4f4e-aadd-82806bb22369 service nova] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.755807] env[62914]: DEBUG oslo_concurrency.lockutils [req-5777e1b9-198a-4352-a027-f94cbe3c2181 req-6d7f3899-0f08-4f4e-aadd-82806bb22369 service nova] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.755871] env[62914]: DEBUG oslo_concurrency.lockutils [req-5777e1b9-198a-4352-a027-f94cbe3c2181 req-6d7f3899-0f08-4f4e-aadd-82806bb22369 service nova] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.756061] env[62914]: DEBUG nova.compute.manager [req-5777e1b9-198a-4352-a027-f94cbe3c2181 req-6d7f3899-0f08-4f4e-aadd-82806bb22369 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] No waiting events found dispatching network-vif-plugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1143.756300] env[62914]: WARNING nova.compute.manager [req-5777e1b9-198a-4352-a027-f94cbe3c2181 req-6d7f3899-0f08-4f4e-aadd-82806bb22369 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received unexpected event network-vif-plugged-8d6d259f-1ebc-4e49-b6f8-114f414606f7 for instance with vm_state shelved_offloaded and task_state spawning. 
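The records above show the serialization pattern behind the "Acquiring lock / acquired / released" lines while instance 06e8b438-01ef-481f-8e27-2faa01bb97aa is torn down: one oslo.concurrency lock named after the instance UUID guards the whole terminate path, and a second, short-lived "<uuid>-events" lock guards the per-instance pending-event map (which is also why the unexpected network-vif-plugged event for the shelved instance above is simply logged and dropped). A minimal, self-contained sketch of that locking pattern follows; it is not Nova source, and the function names are illustrative only.

from oslo_concurrency import lockutils

# Instance UUID taken from the log records above.
INSTANCE_UUID = "06e8b438-01ef-481f-8e27-2faa01bb97aa"

@lockutils.synchronized(INSTANCE_UUID + "-events")
def _clear_events():
    # Guards the per-instance pending-event map; held only briefly,
    # which is why the log shows it released after 0.000s.
    return []

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # The whole terminate path is serialized on a lock named after the
    # instance UUID, so concurrent requests against the same instance
    # queue up behind it ("waited 0.001s" in the log).
    _clear_events()
    # ... power off the VM, unregister it, delete its datastore files ...

if __name__ == "__main__":
    do_terminate_instance()
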
[ 1143.876639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.876830] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.877019] env[62914]: DEBUG nova.network.neutron [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1143.961981] env[62914]: DEBUG oslo_vmware.api [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832743, 'name': PowerOnVM_Task, 'duration_secs': 0.749274} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.965814] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1143.967261] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832744, 'name': ReconfigVM_Task, 'duration_secs': 0.676159} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.968279] env[62914]: DEBUG nova.compute.manager [None req-d7eaa68d-6b77-4711-a091-409fb84b1610 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1143.968585] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11/9673614c-44c9-4348-b528-0bd28c892a11.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.969681] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3488c905-338a-4d2a-9271-b68ce8586b85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.972863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.972992] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.973148] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1143.974373] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fe76ffd-2364-4ede-a5de-cedd143e1741 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.983964] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1143.983964] env[62914]: value = "task-4832746" [ 1143.983964] env[62914]: _type = "Task" [ 1143.983964] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.994258] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832746, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.147128] env[62914]: DEBUG oslo_vmware.api [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832745, 'name': PowerOffVM_Task, 'duration_secs': 0.31257} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.147428] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1144.147608] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1144.148273] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3e9f363-4757-46cd-9d8b-32aeaff682d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.226350] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1144.226821] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1144.227172] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleting the datastore file [datastore1] 06e8b438-01ef-481f-8e27-2faa01bb97aa {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.228045] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3139627-518d-4e14-bec3-4aec2322a8f9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.237630] env[62914]: DEBUG oslo_vmware.api [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1144.237630] env[62914]: value = "task-4832748" [ 1144.237630] env[62914]: _type = "Task" [ 1144.237630] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.248728] env[62914]: DEBUG oslo_vmware.api [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832748, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.499520] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832746, 'name': Rename_Task, 'duration_secs': 0.281735} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.499911] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1144.500031] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18665f93-c206-480e-8717-dd68dac409a8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.507934] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1144.507934] env[62914]: value = "task-4832749" [ 1144.507934] env[62914]: _type = "Task" [ 1144.507934] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.517237] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.665672] env[62914]: DEBUG nova.network.neutron [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.696017] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "4d22f1d3-b37a-4356-a41c-516a19f78538" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.696383] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.696610] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "4d22f1d3-b37a-4356-a41c-516a19f78538-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.696796] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1144.696971] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.699569] env[62914]: INFO nova.compute.manager [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Terminating instance [ 1144.701919] env[62914]: DEBUG nova.compute.manager [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1144.702149] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1144.703441] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f07c257-9808-4b8a-afb3-fecf57f1f045 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.712723] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1144.712910] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b053907-51ed-4cef-bab4-d08a752cd817 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.749442] env[62914]: DEBUG oslo_vmware.api [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44686} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.749734] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.749930] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1144.750138] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1144.750326] env[62914]: INFO nova.compute.manager [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1144.750587] env[62914]: DEBUG oslo.service.loopingcall [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.750793] env[62914]: DEBUG nova.compute.manager [-] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1144.750888] env[62914]: DEBUG nova.network.neutron [-] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1144.784494] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1144.784865] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1144.785158] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore2] 4d22f1d3-b37a-4356-a41c-516a19f78538 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.785627] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa18df8e-8d36-45fd-a898-a9c9f6d2749a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.795472] env[62914]: DEBUG oslo_vmware.api [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1144.795472] env[62914]: value = "task-4832751" [ 1144.795472] env[62914]: _type = "Task" [ 1144.795472] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.807946] env[62914]: DEBUG oslo_vmware.api [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832751, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.963247] env[62914]: INFO nova.compute.manager [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Unrescuing [ 1144.963632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.963801] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.963983] env[62914]: DEBUG nova.network.neutron [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1145.021076] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832749, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.169086] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.204305] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9e142567e929f08e2b7c1ea22629ffa4',container_format='bare',created_at=2025-11-25T11:31:54Z,direct_url=,disk_format='vmdk',id=598263a1-d518-4327-a7bf-74fedbc43cae,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-573262505-shelved',owner='adf406f1352240aba2338e64b8f182b4',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-11-25T11:32:12Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1145.204617] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1145.204811] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.205015] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1145.205199] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.205369] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1145.205607] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1145.205795] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1145.205970] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1145.206184] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1145.206371] env[62914]: DEBUG nova.virt.hardware [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1145.207679] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd37776-e2d8-4283-a8b7-b86607ddd1c2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.217364] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496eefa4-242c-4ea3-8571-308590cdb50f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.241120] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:f1:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d6d259f-1ebc-4e49-b6f8-114f414606f7', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1145.249959] env[62914]: DEBUG oslo.service.loopingcall [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1145.253045] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1145.253386] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0da2b055-1ff2-4fdb-8aa4-5787b68bb951 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.286040] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1145.286040] env[62914]: value = "task-4832752" [ 1145.286040] env[62914]: _type = "Task" [ 1145.286040] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.300013] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832752, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.313880] env[62914]: DEBUG oslo_vmware.api [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246528} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.314204] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1145.314430] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1145.314630] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1145.314809] env[62914]: INFO nova.compute.manager [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1145.315091] env[62914]: DEBUG oslo.service.loopingcall [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1145.315306] env[62914]: DEBUG nova.compute.manager [-] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1145.315399] env[62914]: DEBUG nova.network.neutron [-] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1145.413308] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updating instance_info_cache with network_info: [{"id": "23732df6-58dd-4637-9c04-c25d6b049c91", "address": "fa:16:3e:a9:92:aa", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23732df6-58", "ovs_interfaceid": "23732df6-58dd-4637-9c04-c25d6b049c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.522491] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832749, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.774995] env[62914]: DEBUG nova.compute.manager [req-5c2de21d-d164-4e7e-8cbc-056d202c1451 req-4bca9ef4-bcf1-4626-8e1a-75056a2ab980 service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Received event network-vif-deleted-7028ef5c-ed0b-41c9-9ddb-2662c3a38b54 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1145.775221] env[62914]: INFO nova.compute.manager [req-5c2de21d-d164-4e7e-8cbc-056d202c1451 req-4bca9ef4-bcf1-4626-8e1a-75056a2ab980 service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Neutron deleted interface 7028ef5c-ed0b-41c9-9ddb-2662c3a38b54; detaching it from the instance and deleting it from the info cache [ 1145.775425] env[62914]: DEBUG nova.network.neutron [req-5c2de21d-d164-4e7e-8cbc-056d202c1451 req-4bca9ef4-bcf1-4626-8e1a-75056a2ab980 service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.782133] env[62914]: DEBUG nova.network.neutron [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.783823] env[62914]: INFO nova.compute.manager [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Rebuilding instance [ 1145.806259] env[62914]: DEBUG nova.compute.manager [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1145.806259] env[62914]: DEBUG nova.compute.manager [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 
req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing instance network info cache due to event network-changed-8d6d259f-1ebc-4e49-b6f8-114f414606f7. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1145.806259] env[62914]: DEBUG oslo_concurrency.lockutils [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] Acquiring lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.806259] env[62914]: DEBUG oslo_concurrency.lockutils [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] Acquired lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.806259] env[62914]: DEBUG nova.network.neutron [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Refreshing network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1145.813246] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832752, 'name': CreateVM_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.851315] env[62914]: DEBUG nova.compute.manager [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1145.852475] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e663165-4ee3-40b7-85b7-66aabce5a1c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.916561] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.916812] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 1145.917027] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.917536] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.917732] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.917911] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.918100] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.918282] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 1145.918475] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.000193] env[62914]: DEBUG nova.network.neutron [-] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.021868] env[62914]: DEBUG oslo_vmware.api [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832749, 'name': PowerOnVM_Task, 'duration_secs': 1.024646} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.022205] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1146.022446] env[62914]: INFO nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Took 11.37 seconds to spawn the instance on the hypervisor. 
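Several of the records above follow the same asynchronous-task shape: a vCenter call such as PowerOffVM_Task, Rename_Task, CreateVM_Task or PowerOnVM_Task returns a Task reference, and oslo.vmware polls it (the recurring "progress is 0% ... 87% ... 100%" lines) until it completes and reports duration_secs. The sketch below shows that invoke-and-wait pattern with oslo.vmware directly; it is an illustration rather than the Nova driver code, and the host, credentials and VM moref value are placeholders.

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials; in the log the session targets a
# real vCenter and is created once per compute service.
session = api.VMwareAPISession(
    "vc.example.test", "administrator@vsphere.local", "secret", 10, 0.5)

# Build a managed-object reference for a VM; "vm-123" is a placeholder
# value (Nova resolves the real moref from its own instance mapping).
vm_ref = vim_util.get_moref("vm-123", "VirtualMachine")

# PowerOffVM_Task returns immediately with a Task reference ...
task_ref = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)

# ... and wait_for_task polls it until vCenter reports success or raises,
# producing the "Task: {...} progress is N%" lines seen above.
session.wait_for_task(task_ref)
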
[ 1146.022673] env[62914]: DEBUG nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1146.023740] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca7c2c6-44a0-4e1c-b3c5-f4bf4bd2688a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.243081] env[62914]: DEBUG nova.network.neutron [-] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.288887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.289428] env[62914]: DEBUG nova.objects.instance [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'flavor' on Instance uuid f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.291046] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68e44310-5a7f-419b-8103-72d55b22df76 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.304952] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832752, 'name': CreateVM_Task, 'duration_secs': 0.742364} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.306521] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1146.310020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.310020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.310020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1146.311657] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ed9410-db92-492a-bee6-aefe0d8db75d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.323058] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a4871df-2e9e-4ba6-9950-26de36c33e1e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.332360] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1146.332360] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb5479-0c6b-689a-cb74-c469ee50289a" [ 1146.332360] env[62914]: _type = "Task" [ 1146.332360] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.344710] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cb5479-0c6b-689a-cb74-c469ee50289a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.362923] env[62914]: DEBUG nova.compute.manager [req-5c2de21d-d164-4e7e-8cbc-056d202c1451 req-4bca9ef4-bcf1-4626-8e1a-75056a2ab980 service nova] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Detach interface failed, port_id=7028ef5c-ed0b-41c9-9ddb-2662c3a38b54, reason: Instance 4d22f1d3-b37a-4356-a41c-516a19f78538 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1146.365965] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1146.366353] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-896ac39d-1d06-4c38-9b74-2c6e1cdef4bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.377080] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1146.377080] env[62914]: value = "task-4832753" [ 1146.377080] env[62914]: _type = "Task" [ 1146.377080] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.386821] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.424491] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.424981] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.425305] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.425572] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1146.426711] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7593740-015a-4921-b8c6-09dce808e7d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.437994] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5179443-1196-4aac-87dc-f75c252528c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.458609] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2722a9f3-a26c-4aee-9868-c2dc5dd8d1b4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.468550] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9558ed1-1292-41ea-a3dd-6fcf49c0609c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.521011] env[62914]: INFO nova.compute.manager [-] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Took 1.77 seconds to deallocate network for instance. [ 1146.521612] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178337MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1146.521854] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.522177] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.547246] env[62914]: INFO nova.compute.manager [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Took 18.27 seconds to build instance. [ 1146.716664] env[62914]: DEBUG nova.network.neutron [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updated VIF entry in instance network info cache for port 8d6d259f-1ebc-4e49-b6f8-114f414606f7. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1146.716920] env[62914]: DEBUG nova.network.neutron [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [{"id": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "address": "fa:16:3e:f8:f1:ff", "network": {"id": "8a91940e-7552-4133-875c-77d1c0a97b22", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1584229980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf406f1352240aba2338e64b8f182b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6d259f-1e", "ovs_interfaceid": "8d6d259f-1ebc-4e49-b6f8-114f414606f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.718990] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "b285198b-aa95-4dcb-99b3-531d09c210d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.719281] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.719607] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "b285198b-aa95-4dcb-99b3-531d09c210d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.719879] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.720078] env[62914]: 
DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.722345] env[62914]: INFO nova.compute.manager [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Terminating instance [ 1146.727322] env[62914]: DEBUG nova.compute.manager [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1146.727573] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1146.729273] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d3623-1f08-498b-8ed3-8391ba1add4f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.737581] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1146.737894] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3735c614-2f52-40db-a708-9d9c33c791f4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.741625] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5183d-774c-24c8-7841-013dc2387ff0/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1146.745025] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f46de39-643c-424a-ad24-3ddb9df73ba4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.745806] env[62914]: INFO nova.compute.manager [-] [instance: 4d22f1d3-b37a-4356-a41c-516a19f78538] Took 1.43 seconds to deallocate network for instance. [ 1146.754699] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5183d-774c-24c8-7841-013dc2387ff0/disk-0.vmdk is in state: ready. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1146.754955] env[62914]: ERROR oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5183d-774c-24c8-7841-013dc2387ff0/disk-0.vmdk due to incomplete transfer. [ 1146.755214] env[62914]: DEBUG oslo_vmware.api [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1146.755214] env[62914]: value = "task-4832754" [ 1146.755214] env[62914]: _type = "Task" [ 1146.755214] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.755416] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6776eccf-e42a-4a97-8223-ad1a3a320ee2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.769525] env[62914]: DEBUG oslo_vmware.api [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832754, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.771599] env[62914]: DEBUG oslo_vmware.rw_handles [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5183d-774c-24c8-7841-013dc2387ff0/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1146.772365] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Uploaded image 0c7a66ae-a3fb-406e-9993-5953aff2f722 to the Glance image server {{(pid=62914) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1146.774687] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Destroying the VM {{(pid=62914) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1146.775022] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-00d8775c-01bd-4630-a5fa-f20d5b5527d0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.783858] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1146.783858] env[62914]: value = "task-4832755" [ 1146.783858] env[62914]: _type = "Task" [ 1146.783858] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.796504] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832755, 'name': Destroy_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.797682] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2c2d31-9c6f-40f6-85d6-5cf7c0358373 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.826180] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1146.826753] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-987f0456-b846-4c4b-b52c-3ade8614f3e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.835327] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1146.835327] env[62914]: value = "task-4832756" [ 1146.835327] env[62914]: _type = "Task" [ 1146.835327] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.855441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.855877] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Processing image 598263a1-d518-4327-a7bf-74fedbc43cae {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1146.856021] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.856210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk" {{(pid=62914) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.856504] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.857434] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832756, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.857655] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-671f2daa-fff4-455c-bec8-5891e03b962e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.869357] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.869707] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1146.870646] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a3f00f9-ee0a-49f3-836f-73590de3e248 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.882285] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1146.882285] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]520717dc-8a2d-b797-53fd-c60583a2639d" [ 1146.882285] env[62914]: _type = "Task" [ 1146.882285] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.890183] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832753, 'name': PowerOffVM_Task, 'duration_secs': 0.2973} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.891055] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1146.892697] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1146.892697] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41b9841f-9f1b-45ad-a954-1ea2ffbd9a13 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.898036] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1146.898363] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Fetch image to [datastore1] OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21/OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1146.898509] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Downloading stream optimized image 598263a1-d518-4327-a7bf-74fedbc43cae to [datastore1] OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21/OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21.vmdk on the data store datastore1 as vApp {{(pid=62914) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1146.898776] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Downloading image file data 598263a1-d518-4327-a7bf-74fedbc43cae to the ESX as VM named 'OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21' {{(pid=62914) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1146.904954] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1146.904954] env[62914]: value = "task-4832757" [ 1146.904954] env[62914]: _type = "Task" [ 1146.904954] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.917271] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1146.917523] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1146.917750] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942060', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'name': 'volume-d801e165-dc65-4457-9762-f209bc342e87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19f21caa-7d96-4526-bb12-768c4fe4d23e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'serial': 'd801e165-dc65-4457-9762-f209bc342e87'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1146.918792] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac7f82a-73b2-4299-b7bd-5438939f5593 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.941732] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25f48c9-7232-4aca-8a89-4ca4aea628ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.968720] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560ce5af-dff7-4e28-a232-6de4ccfa101e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.991034] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdc8334-e6c9-4333-856f-c87b6398df00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.007741] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] The volume has not been displaced from its original location: [datastore2] volume-d801e165-dc65-4457-9762-f209bc342e87/volume-d801e165-dc65-4457-9762-f209bc342e87.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1147.013161] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.013586] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48acfba2-0b47-4902-a1f5-76ca12218960 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.027727] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1147.027727] env[62914]: value = "resgroup-9" [ 1147.027727] env[62914]: _type = "ResourcePool" [ 1147.027727] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1147.027989] env[62914]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c38f86d4-15bc-4df0-a77e-4c8104f9d56f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.054129] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.056216] env[62914]: DEBUG oslo_concurrency.lockutils [None req-69c111c5-e326-45fc-b4e2-1651991ef730 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.784s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.057138] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1147.057138] env[62914]: value = "task-4832758" [ 1147.057138] env[62914]: _type = "Task" [ 1147.057138] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.063115] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease: (returnval){ [ 1147.063115] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1147.063115] env[62914]: _type = "HttpNfcLease" [ 1147.063115] env[62914]: } obtained for vApp import into resource pool (val){ [ 1147.063115] env[62914]: value = "resgroup-9" [ 1147.063115] env[62914]: _type = "ResourcePool" [ 1147.063115] env[62914]: }. 
{{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1147.063492] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the lease: (returnval){ [ 1147.063492] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1147.063492] env[62914]: _type = "HttpNfcLease" [ 1147.063492] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1147.075736] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832758, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.082093] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1147.082093] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1147.082093] env[62914]: _type = "HttpNfcLease" [ 1147.082093] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1147.847453] env[62914]: DEBUG oslo_concurrency.lockutils [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] Releasing lock "refresh_cache-e730b472-fca8-4041-a00c-91bee25232f7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.847833] env[62914]: DEBUG nova.compute.manager [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Received event network-vif-deleted-91711c66-4bec-40d9-b1be-9603bbad7e46 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1147.847915] env[62914]: INFO nova.compute.manager [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Neutron deleted interface 91711c66-4bec-40d9-b1be-9603bbad7e46; detaching it from the instance and deleting it from the info cache [ 1147.848101] env[62914]: DEBUG nova.network.neutron [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.849798] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.853161] env[62914]: INFO nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating resource usage from migration 2877ce41-ea0e-4f50-ba01-f1780ded0468 [ 1147.856581] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd 
tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.856944] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.880564] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27d5b65e-b1d0-4cbc-a1e2-597a46512ecf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.886870] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e061304c-998b-4331-b60d-809916844a6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887044] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887193] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance af141439-1c36-4184-9775-d1e30ee77ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887312] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 06e8b438-01ef-481f-8e27-2faa01bb97aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887434] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887594] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887768] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 3b26b5d7-524a-41af-ab75-a158568e031e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.887908] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance b285198b-aa95-4dcb-99b3-531d09c210d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888056] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888186] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 88acf376-122d-4796-8400-dfc4c7ec45d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888303] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 19f21caa-7d96-4526-bb12-768c4fe4d23e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888415] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 74e7896c-8a1f-448d-a44b-e6febfff9000 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888527] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4d22f1d3-b37a-4356-a41c-516a19f78538 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888656] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e730b472-fca8-4041-a00c-91bee25232f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.888794] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Migration 2877ce41-ea0e-4f50-ba01-f1780ded0468 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 1147.888909] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 9673614c-44c9-4348-b528-0bd28c892a11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1147.889143] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1147.889304] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=100GB used_disk=14GB total_vcpus=48 used_vcpus=15 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '15', 'num_vm_active': '11', 'num_task_None': '7', 'num_os_type_None': '15', 'num_proj_b19293a423174c20963c000441db100e': '2', 'io_workload': '2', 'num_proj_d141c01c1d5848eea6ef2b831e431ba5': '2', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '2', 'num_task_deleting': '3', 'num_proj_b59bf6daf8c246f7b034dc0adcfc8cde': '1', 'num_proj_d77829ac81cd41f2a4acdd571295ca6d': '3', 'num_vm_rescued': '2', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '1', 'num_proj_adf406f1352240aba2338e64b8f182b4': '1', 'num_task_unrescuing': '1', 'num_task_shelving_image_uploading': '1', 'num_proj_894c73ea90624428afeb1165afbbfa9c': '1', 'num_task_rebuilding': '1', 'num_proj_ceffd38633104c58bbdc3176b7489c92': '1', 'num_proj_1780142384594b1dabc6811b54144d56': '1', 'num_vm_stopped': '1', 'num_task_resize_prep': '1', 'num_proj_4860bec4a28e4289b7a508f007fff452': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1147.896537] env[62914]: DEBUG nova.compute.manager [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1147.913636] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832756, 'name': PowerOffVM_Task, 'duration_secs': 0.728736} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.914222] env[62914]: DEBUG oslo_vmware.api [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832754, 'name': PowerOffVM_Task, 'duration_secs': 0.804112} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.914441] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832758, 'name': ReconfigVM_Task, 'duration_secs': 0.216542} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.914650] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1147.914650] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1147.914650] env[62914]: _type = "HttpNfcLease" [ 1147.914650] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1147.914875] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832755, 'name': Destroy_Task, 'duration_secs': 0.394951} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.915919] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1147.921073] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfiguring VM instance instance-00000069 to detach disk 2002 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.921418] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1147.921585] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1147.921834] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1147.926430] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Destroyed the VM [ 1147.926694] env[62914]: DEBUG nova.virt.vmwareapi.vmops 
[None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deleting Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1147.930516] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7fff411-4e9e-432c-87b2-ffc03cd80d73 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.945799] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-323839e8-1399-4579-ac6c-fc91d0c9f8a9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.947871] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60e74b61-001d-4492-930c-2fbd41080c7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.957582] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-529b93c4-7e5b-4d3e-aa4e-3c92f413a530 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.963256] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6baf668-fa68-4be7-ad16-b5fe9c6a54ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.984659] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1147.984659] env[62914]: value = "task-4832760" [ 1147.984659] env[62914]: _type = "Task" [ 1147.984659] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.987324] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1147.987324] env[62914]: value = "task-4832763" [ 1147.987324] env[62914]: _type = "Task" [ 1147.987324] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.987615] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1147.987615] env[62914]: value = "task-4832761" [ 1147.987615] env[62914]: _type = "Task" [ 1147.987615] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.017299] env[62914]: DEBUG nova.compute.manager [req-c79cfb31-db05-48d8-b2d2-b5a1c83ed3c7 req-44b57341-4919-42da-984a-0e6040cbb646 service nova] [instance: 06e8b438-01ef-481f-8e27-2faa01bb97aa] Detach interface failed, port_id=91711c66-4bec-40d9-b1be-9603bbad7e46, reason: Instance 06e8b438-01ef-481f-8e27-2faa01bb97aa could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1148.026650] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.033910] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832763, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.034696] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.185573] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9ea468-f826-4c07-8bbc-0643da8f6b25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.195038] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd253c7-ab14-4ef2-8098-857bf3de2df1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.226245] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edadf013-bc70-4710-ba83-f00f30f67e2c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.234469] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40e12a9-ead0-493c-92b7-6c74eff72270 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.248839] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.365631] env[62914]: INFO nova.compute.manager [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Detaching volume 73ef6951-5fab-40ea-bbd6-8971648c87c6 [ 1148.367438] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1148.367438] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1148.367438] env[62914]: _type = "HttpNfcLease" [ 1148.367438] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1148.404040] env[62914]: INFO nova.virt.block_device [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Attempting to driver detach volume 73ef6951-5fab-40ea-bbd6-8971648c87c6 from mountpoint /dev/sdb [ 1148.404322] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1148.404524] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1148.405461] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf0c779-7f4c-42a1-9956-f614690f1d08 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.436254] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cc0742-fba7-412a-97f8-b85ba99a9987 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.444274] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c72584-9951-409a-9c1a-9cbf5a363f43 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.448206] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.467446] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b35fa0b-f49c-49ca-b7a1-b54cb47a6fbb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.484085] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-73ef6951-5fab-40ea-bbd6-8971648c87c6/volume-73ef6951-5fab-40ea-bbd6-8971648c87c6.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1148.488832] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1148.489310] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-234c3586-ae2d-4b5b-adcb-9eb33c95c393 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.521551] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832763, 'name': RemoveSnapshot_Task, 'duration_secs': 0.476331} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.521816] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.525664] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deleted Snapshot of the VM instance {{(pid=62914) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1148.526043] env[62914]: DEBUG nova.compute.manager [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1148.526402] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1148.526402] env[62914]: value = "task-4832764" [ 1148.526402] env[62914]: _type = "Task" [ 1148.526402] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.526626] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832761, 'name': ReconfigVM_Task, 'duration_secs': 0.21001} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.527357] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6218dd3c-2a12-442d-a211-2ad77379f8a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.529991] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942060', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'name': 'volume-d801e165-dc65-4457-9762-f209bc342e87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19f21caa-7d96-4526-bb12-768c4fe4d23e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd801e165-dc65-4457-9762-f209bc342e87', 'serial': 'd801e165-dc65-4457-9762-f209bc342e87'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1148.530302] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1148.533731] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925164f2-11bf-49cb-b777-034aa08ff0b7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.541965] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832764, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.545192] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1148.547645] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07f15c03-29e0-4ce8-8a5c-ca0731ab46bd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.611774] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1148.612065] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1148.612233] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Deleting the datastore file [datastore2] 19f21caa-7d96-4526-bb12-768c4fe4d23e {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1148.612921] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba70dd45-d7ca-4956-8414-564d9b37fbe3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.620468] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for the task: (returnval){ [ 1148.620468] env[62914]: value = "task-4832766" [ 1148.620468] env[62914]: _type = "Task" [ 1148.620468] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.629231] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832766, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.755027] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1148.865062] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1148.865062] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1148.865062] env[62914]: _type = "HttpNfcLease" [ 1148.865062] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1149.001526] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.040445] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.053023] env[62914]: INFO nova.compute.manager [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Shelve offloading [ 1149.054876] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1149.055137] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fef2bee6-8469-4c40-8226-4ff75c443725 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.061969] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1149.061969] env[62914]: value = "task-4832767" [ 1149.061969] env[62914]: _type = "Task" [ 1149.061969] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.070308] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.131846] env[62914]: DEBUG oslo_vmware.api [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Task: {'id': task-4832766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080732} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.132137] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.132324] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1149.132506] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1149.194181] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1149.194645] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f05fd494-db88-4080-bcd6-e20515780d8d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.206722] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71758f2b-40e1-4100-b067-d554f87fd103 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.243976] env[62914]: ERROR nova.compute.manager [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Failed to detach volume d801e165-dc65-4457-9762-f209bc342e87 from /dev/sda: nova.exception.InstanceNotFound: Instance 19f21caa-7d96-4526-bb12-768c4fe4d23e could not be found. 
[ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Traceback (most recent call last): [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self.driver.rebuild(**kwargs) [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] raise NotImplementedError() [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] NotImplementedError [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] During handling of the above exception, another exception occurred: [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Traceback (most recent call last): [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self.driver.detach_volume(context, old_connection_info, [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] return self._volumeops.detach_volume(connection_info, instance) [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self._detach_volume_vmdk(connection_info, instance) [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] stable_ref.fetch_moref(session) [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] 
nova.exception.InstanceNotFound: Instance 19f21caa-7d96-4526-bb12-768c4fe4d23e could not be found. [ 1149.243976] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.258337] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1149.258515] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.736s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.258816] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.205s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.259056] env[62914]: DEBUG nova.objects.instance [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'resources' on Instance uuid 06e8b438-01ef-481f-8e27-2faa01bb97aa {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.366925] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1149.366925] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1149.366925] env[62914]: _type = "HttpNfcLease" [ 1149.366925] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1149.391032] env[62914]: DEBUG nova.compute.utils [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Build of instance 19f21caa-7d96-4526-bb12-768c4fe4d23e aborted: Failed to rebuild volume backed instance. {{(pid=62914) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1149.393658] env[62914]: ERROR nova.compute.manager [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 19f21caa-7d96-4526-bb12-768c4fe4d23e aborted: Failed to rebuild volume backed instance. 
[ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Traceback (most recent call last): [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self.driver.rebuild(**kwargs) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] raise NotImplementedError() [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] NotImplementedError [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] During handling of the above exception, another exception occurred: [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Traceback (most recent call last): [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self._detach_root_volume(context, instance, root_bdm) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] with excutils.save_and_reraise_exception(): [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self.force_reraise() [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] raise self.value [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self.driver.detach_volume(context, old_connection_info, [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] return self._volumeops.detach_volume(connection_info, instance) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self._detach_volume_vmdk(connection_info, instance) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] stable_ref.fetch_moref(session) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] nova.exception.InstanceNotFound: Instance 19f21caa-7d96-4526-bb12-768c4fe4d23e could not be found. [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] During handling of the above exception, another exception occurred: [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Traceback (most recent call last): [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 10941, in _error_out_instance_on_exception [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] yield [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 1149.393658] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self._do_rebuild_instance_with_claim( [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self._do_rebuild_instance( [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] self._rebuild_default_impl(**kwargs) [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] 
self._rebuild_volume_backed_instance( [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] raise exception.BuildAbortException( [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] nova.exception.BuildAbortException: Build of instance 19f21caa-7d96-4526-bb12-768c4fe4d23e aborted: Failed to rebuild volume backed instance. [ 1149.394953] env[62914]: ERROR nova.compute.manager [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] [ 1149.500442] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.540846] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832764, 'name': ReconfigVM_Task, 'duration_secs': 0.903799} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.541135] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1149.546147] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cc17f1b-2d70-4763-b6b6-9fdb59977c96 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.561130] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1149.561130] env[62914]: value = "task-4832768" [ 1149.561130] env[62914]: _type = "Task" [ 1149.561130] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.572542] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832768, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.576088] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1149.576299] env[62914]: DEBUG nova.compute.manager [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1149.577091] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55447533-269e-4eed-b64b-bb18fb5d2ddf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.585552] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.585776] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.585961] env[62914]: DEBUG nova.network.neutron [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1149.869232] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1149.869232] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1149.869232] env[62914]: _type = "HttpNfcLease" [ 1149.869232] env[62914]: } is initializing. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1149.969082] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d8f111-5064-455b-8385-e3f9bb084c3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.977483] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c632b45-3d35-4e8a-a1d4-e680cc15777a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.013188] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbcd612-9a50-43d5-a02a-b87b21eafc5a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.023380] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.024927] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e53f5e-7245-4227-a83b-61792f79d79e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.040071] env[62914]: DEBUG nova.compute.provider_tree [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.073254] env[62914]: DEBUG oslo_vmware.api [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832768, 'name': ReconfigVM_Task, 'duration_secs': 0.230289} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.073658] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942062', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'name': 'volume-73ef6951-5fab-40ea-bbd6-8971648c87c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b26b5d7-524a-41af-ab75-a158568e031e', 'attached_at': '', 'detached_at': '', 'volume_id': '73ef6951-5fab-40ea-bbd6-8971648c87c6', 'serial': '73ef6951-5fab-40ea-bbd6-8971648c87c6'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1150.221998] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1150.222277] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1150.222474] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleting the datastore file [datastore1] b285198b-aa95-4dcb-99b3-531d09c210d0 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1150.222768] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88639c6d-ad4a-4f78-b6c9-cf5eeedcaeb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.233119] env[62914]: DEBUG oslo_vmware.api [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1150.233119] env[62914]: value = "task-4832769" [ 1150.233119] env[62914]: _type = "Task" [ 1150.233119] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.242967] env[62914]: DEBUG oslo_vmware.api [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832769, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.355348] env[62914]: DEBUG nova.network.neutron [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.366559] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1150.366559] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1150.366559] env[62914]: _type = "HttpNfcLease" [ 1150.366559] env[62914]: } is initializing. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1150.518819] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832760, 'name': ReconfigVM_Task, 'duration_secs': 2.29187} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.519130] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfigured VM instance instance-00000069 to detach disk 2002 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1150.519333] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1150.519605] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85b451d9-4dbe-4cd4-9a28-5ae9d9dfce5a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.526427] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1150.526427] env[62914]: value = "task-4832770" [ 1150.526427] env[62914]: _type = "Task" [ 1150.526427] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.534640] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.542881] env[62914]: DEBUG nova.scheduler.client.report [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1150.618154] env[62914]: DEBUG nova.objects.instance [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'flavor' on Instance uuid 3b26b5d7-524a-41af-ab75-a158568e031e {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.744806] env[62914]: DEBUG oslo_vmware.api [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396545} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.745180] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1150.745446] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1150.745674] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1150.745927] env[62914]: INFO nova.compute.manager [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Took 4.02 seconds to destroy the instance on the hypervisor. [ 1150.746205] env[62914]: DEBUG oslo.service.loopingcall [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1150.746415] env[62914]: DEBUG nova.compute.manager [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1150.746556] env[62914]: DEBUG nova.network.neutron [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1150.857863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.867750] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1150.867750] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1150.867750] env[62914]: _type = "HttpNfcLease" [ 1150.867750] env[62914]: } is ready. 
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1150.868110] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1150.868110] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5220787e-7a09-0eb8-08cb-f8cc83d336c7" [ 1150.868110] env[62914]: _type = "HttpNfcLease" [ 1150.868110] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1150.868803] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba67498-9ad9-4cc6-9c62-aac084666c87 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.876165] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52101391-36a5-89a0-789c-ed59c503b808/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1150.876462] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52101391-36a5-89a0-789c-ed59c503b808/disk-0.vmdk. {{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1150.946032] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5f340711-6521-4f15-bc22-9b5bb74ee3fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.038642] env[62914]: DEBUG oslo_vmware.api [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832770, 'name': PowerOnVM_Task, 'duration_secs': 0.508011} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.039032] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1151.039294] env[62914]: DEBUG nova.compute.manager [None req-7224fcb3-e7c9-48f9-a9e9-9475496325f2 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1151.040193] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b894fc0a-3ddb-4156-b5e7-b2644375d650 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.048034] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.056031] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.203s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.056031] env[62914]: DEBUG nova.objects.instance [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid 4d22f1d3-b37a-4356-a41c-516a19f78538 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.073420] env[62914]: INFO nova.scheduler.client.report [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted allocations for instance 06e8b438-01ef-481f-8e27-2faa01bb97aa [ 1151.439545] env[62914]: DEBUG nova.compute.manager [req-347f0df1-2561-469c-aa3d-cc55eb2b101f req-39c67458-f47e-458f-aef2-2a7f87f33bf6 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-vif-unplugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1151.439545] env[62914]: DEBUG oslo_concurrency.lockutils [req-347f0df1-2561-469c-aa3d-cc55eb2b101f req-39c67458-f47e-458f-aef2-2a7f87f33bf6 service nova] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.439545] env[62914]: DEBUG oslo_concurrency.lockutils [req-347f0df1-2561-469c-aa3d-cc55eb2b101f req-39c67458-f47e-458f-aef2-2a7f87f33bf6 service nova] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.439545] env[62914]: DEBUG oslo_concurrency.lockutils [req-347f0df1-2561-469c-aa3d-cc55eb2b101f req-39c67458-f47e-458f-aef2-2a7f87f33bf6 service nova] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.439545] env[62914]: DEBUG nova.compute.manager [req-347f0df1-2561-469c-aa3d-cc55eb2b101f req-39c67458-f47e-458f-aef2-2a7f87f33bf6 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] No waiting events found dispatching network-vif-unplugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1151.439545] env[62914]: WARNING nova.compute.manager [req-347f0df1-2561-469c-aa3d-cc55eb2b101f req-39c67458-f47e-458f-aef2-2a7f87f33bf6 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received unexpected event network-vif-unplugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 for instance with vm_state shelved and task_state shelving_offloading. [ 1151.441091] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.581810] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1d512f0e-b7f3-4c90-b95b-47eb8dfe58e7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "06e8b438-01ef-481f-8e27-2faa01bb97aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.971s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.627170] env[62914]: DEBUG oslo_concurrency.lockutils [None req-aea99217-e9c6-4144-8a94-e4e0810cffdd tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.770s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.629212] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_power_states {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.707992] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1151.711387] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab355d5-45e1-4200-bd59-365f316bcea0 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.723405] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1151.725325] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-796ae454-3d93-4c09-8bbd-11255d406c6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.785055] env[62914]: DEBUG nova.compute.manager [req-a98d0d08-07d3-41d2-945c-79aac482bc7a req-41379a49-ab76-4979-852f-22214aa1e9df service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Received event network-vif-deleted-57890d0b-660c-4230-8104-4d1ae53eb7ce {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1151.786337] env[62914]: INFO nova.compute.manager [req-a98d0d08-07d3-41d2-945c-79aac482bc7a req-41379a49-ab76-4979-852f-22214aa1e9df service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Neutron deleted interface 57890d0b-660c-4230-8104-4d1ae53eb7ce; detaching it from the instance and deleting it from the info cache [ 1151.786337] env[62914]: DEBUG nova.network.neutron [req-a98d0d08-07d3-41d2-945c-79aac482bc7a req-41379a49-ab76-4979-852f-22214aa1e9df service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.802799] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1151.803224] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1151.803533] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleting the datastore file [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.806127] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bec6ee08-d07e-49cd-8df2-5466986a9fb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.819833] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1151.819833] env[62914]: value = "task-4832772" [ 1151.819833] env[62914]: _type = "Task" [ 1151.819833] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.831727] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.880095] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8be833a-cce1-491e-887a-277c3c778cfc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.899097] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0a3a9e-8317-4917-87f6-76fa8e7d1ed7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.947668] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85aa13eb-5117-4bc3-9179-457abe6faff4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.959604] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cdfe5b-8c67-4ab3-be19-66c7cb8c4087 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.979064] env[62914]: DEBUG nova.compute.provider_tree [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.056340] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.056681] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.057436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.057632] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock 
"3b26b5d7-524a-41af-ab75-a158568e031e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.057832] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.060206] env[62914]: INFO nova.compute.manager [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Terminating instance [ 1152.062609] env[62914]: DEBUG nova.compute.manager [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1152.062933] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1152.063966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a776a04-9681-4b3c-975f-12dd9b590aed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.072721] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1152.073183] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71043a2f-8fa4-4918-a0a7-5b2b562f993e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.080286] env[62914]: DEBUG oslo_vmware.api [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1152.080286] env[62914]: value = "task-4832773" [ 1152.080286] env[62914]: _type = "Task" [ 1152.080286] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.090159] env[62914]: DEBUG oslo_vmware.api [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832773, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.135132] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Getting list of instances from cluster (obj){ [ 1152.135132] env[62914]: value = "domain-c8" [ 1152.135132] env[62914]: _type = "ClusterComputeResource" [ 1152.135132] env[62914]: } {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1152.136291] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20381b3a-a210-45d3-826d-7767b13bac2d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.163723] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Got total of 10 instances {{(pid=62914) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1152.163946] env[62914]: WARNING nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] While synchronizing instance power states, found 14 instances in the database and 10 instances on the hypervisor. [ 1152.164194] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid e061304c-998b-4331-b60d-809916844a6f {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.164240] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.164386] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.164536] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.164711] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 0c47848d-fcff-404d-8e84-e9fd09be9e9e {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.164875] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 3b26b5d7-524a-41af-ab75-a158568e031e {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165080] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid b285198b-aa95-4dcb-99b3-531d09c210d0 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165272] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165471] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165471] env[62914]: DEBUG nova.compute.manager 
[None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165714] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 19f21caa-7d96-4526-bb12-768c4fe4d23e {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165811] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.165959] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 4d22f1d3-b37a-4356-a41c-516a19f78538 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.166202] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Triggering sync for uuid 9673614c-44c9-4348-b528-0bd28c892a11 {{(pid=62914) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10414}} [ 1152.170783] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "e061304c-998b-4331-b60d-809916844a6f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.171506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "e061304c-998b-4331-b60d-809916844a6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.171506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.171663] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.171939] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.172459] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1152.172751] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.172993] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.173326] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.173621] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.173900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "3b26b5d7-524a-41af-ab75-a158568e031e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.174258] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "b285198b-aa95-4dcb-99b3-531d09c210d0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.174612] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.174787] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.175368] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.175368] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.175525] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.175842] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.176172] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.176382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.176898] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "4d22f1d3-b37a-4356-a41c-516a19f78538" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.177374] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.177374] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "9673614c-44c9-4348-b528-0bd28c892a11" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.177891] env[62914]: INFO nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] During sync_power_state the instance has a pending task (resize_prep). Skip. 
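Annotation: the records above show the ComputeManager._sync_power_states periodic task comparing the database view (14 instances) against the hypervisor view (10 instances), taking a short per-UUID lock, and skipping any instance that still has a pending task such as resize_prep or deleting. The following is only a minimal, self-contained sketch of that pattern; the Instance type, the in-memory lock table, and the helper names are hypothetical stand-ins, not Nova's actual code.

# Illustrative sketch of a power-state sync pass (hypothetical types, not Nova's code).
import threading
from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    task_state: str | None   # e.g. "resize_prep", "deleting", or None
    power_state: str          # state recorded in the database

_locks: dict[str, threading.Lock] = {}

def _instance_lock(uuid: str) -> threading.Lock:
    # One short-lived lock per instance UUID, mirroring the per-UUID
    # "Acquiring lock <uuid> by ..._sync_power_states" records in the log.
    return _locks.setdefault(uuid, threading.Lock())

def sync_power_states(db_instances: list[Instance],
                      hypervisor_states: dict[str, str]) -> None:
    if len(db_instances) != len(hypervisor_states):
        print("WARNING: found %d instances in the database and %d on the hypervisor"
              % (len(db_instances), len(hypervisor_states)))
    for inst in db_instances:
        with _instance_lock(inst.uuid):
            if inst.task_state is not None:
                # Matches "During sync_power_state the instance has a
                # pending task (...). Skip." in the log.
                print("pending task (%s), skipping %s" % (inst.task_state, inst.uuid))
                continue
            driver_state = hypervisor_states.get(inst.uuid, "NOSTATE")
            if driver_state != inst.power_state:
                print("would resync %s: db=%s driver=%s"
                      % (inst.uuid, inst.power_state, driver_state))

# Example mirroring the 9673614c-... record, which is skipped for resize_prep.
sync_power_states(
    [Instance("9673614c-44c9-4348-b528-0bd28c892a11", "resize_prep", "running")],
    {"9673614c-44c9-4348-b528-0bd28c892a11": "running"},
)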
[ 1152.177891] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "9673614c-44c9-4348-b528-0bd28c892a11" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.180526] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-face8c8d-7a6d-405b-b7e0-9371bb7ab6b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.184363] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4fec84-6fd8-4b51-99c8-bb821ba677b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.187946] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4e3220-5681-4dce-a517-ff96dfe4a356 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.190646] env[62914]: DEBUG nova.network.neutron [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.192856] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f225aa7-17f5-44dd-aa1e-df445d2c5f4b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.197838] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3827e6fa-62a5-4ebb-9e23-ea89f593cdde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.201196] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc7f826-a303-4039-bcb9-2477db458422 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.206138] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3252f4e5-d87e-4fc7-b81b-9c6d89ddc358 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.209172] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59caa24-eebb-42e4-b4d0-e43cb6baf3de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.215054] env[62914]: INFO nova.compute.manager [-] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Took 1.47 seconds to deallocate network for instance. 
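Annotation: several records in this stretch show Neutron deleting a port (network-vif-deleted, "Neutron deleted interface ..."), Nova detaching it and then "Updating instance_info_cache with network_info: []". The sketch below illustrates only that cache-update pattern; the cache dictionary and the handler name are hypothetical, not nova.network.neutron's real implementation.

# Hypothetical in-memory stand-in for the per-instance network info cache.
instance_info_cache: dict[str, list[dict]] = {
    "b285198b-aa95-4dcb-99b3-531d09c210d0": [
        {"port_id": "57890d0b-660c-4230-8104-4d1ae53eb7ce", "address": "fa:16:3e:00:00:01"},
    ],
}

def handle_vif_deleted(instance_uuid: str, port_id: str) -> None:
    """Drop a deleted Neutron port from the cached network_info list."""
    vifs = instance_info_cache.get(instance_uuid, [])
    remaining = [vif for vif in vifs if vif["port_id"] != port_id]
    if len(remaining) == len(vifs):
        # A missing port is reported, not raised, similar to the
        # "Detach interface failed ... could not be found" record.
        print("port %s not found on instance %s" % (port_id, instance_uuid))
    instance_info_cache[instance_uuid] = remaining
    print("Updating instance_info_cache with network_info: %s" % remaining)

handle_vif_deleted("b285198b-aa95-4dcb-99b3-531d09c210d0",
                   "57890d0b-660c-4230-8104-4d1ae53eb7ce")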
[ 1152.265711] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac5be5c-d0b5-4203-9908-732d65aab0dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.292623] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5da619f-fd0a-41a5-b50f-4625d4f3a37b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.305752] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da581e29-cf79-42ab-bef6-fd11c526a856 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.349392] env[62914]: DEBUG oslo_vmware.api [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832772, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177347} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.349719] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.349920] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1152.350114] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1152.354881] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Completed reading data from the image iterator. {{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1152.354881] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52101391-36a5-89a0-789c-ed59c503b808/disk-0.vmdk. 
{{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1152.370318] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018ea5c3-a895-4bfd-8ce7-1af5176e68f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.374795] env[62914]: DEBUG nova.compute.manager [req-a98d0d08-07d3-41d2-945c-79aac482bc7a req-41379a49-ab76-4979-852f-22214aa1e9df service nova] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] Detach interface failed, port_id=57890d0b-660c-4230-8104-4d1ae53eb7ce, reason: Instance b285198b-aa95-4dcb-99b3-531d09c210d0 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1152.382447] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52101391-36a5-89a0-789c-ed59c503b808/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1152.382447] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52101391-36a5-89a0-789c-ed59c503b808/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1152.382447] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2c305812-6ac4-40b8-b93a-c0128b618a2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.394014] env[62914]: INFO nova.scheduler.client.report [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted allocations for instance 88acf376-122d-4796-8400-dfc4c7ec45d7 [ 1152.453872] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.482326] env[62914]: DEBUG nova.scheduler.client.report [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1152.591121] env[62914]: DEBUG oslo_vmware.api [None 
req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832773, 'name': PowerOffVM_Task, 'duration_secs': 0.267237} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.591444] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1152.591617] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1152.592239] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d912e2d6-2b81-4196-b8bf-c71e22a2df33 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.635770] env[62914]: DEBUG oslo_vmware.rw_handles [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52101391-36a5-89a0-789c-ed59c503b808/disk-0.vmdk. {{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1152.636069] env[62914]: INFO nova.virt.vmwareapi.images [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Downloaded image file data 598263a1-d518-4327-a7bf-74fedbc43cae [ 1152.637147] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff57498d-ec31-4678-a1c4-9d854b76f1a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.653906] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca5df122-75a7-445e-a1fe-a863ed619974 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.673288] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1152.674092] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1152.674092] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore2] 3b26b5d7-524a-41af-ab75-a158568e031e {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.674092] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab8b2c32-bd72-40e8-8658-1f7e558e24ac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.681192] env[62914]: DEBUG oslo_vmware.api [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1152.681192] env[62914]: value = "task-4832776" [ 1152.681192] env[62914]: _type = "Task" [ 1152.681192] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.690801] env[62914]: DEBUG oslo_vmware.api [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832776, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.707552] env[62914]: INFO nova.virt.vmwareapi.images [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] The imported VM was unregistered [ 1152.709867] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1152.710135] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Creating directory with path [datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.710444] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01c6a6ab-985e-4a5a-8336-d919e5f93a0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.722504] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Created directory with path [datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.722795] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21/OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21.vmdk to [datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk. 
{{(pid=62914) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1152.723087] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8476d265-b190-4b54-a028-5092b7234da8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.730052] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1152.730052] env[62914]: value = "task-4832777" [ 1152.730052] env[62914]: _type = "Task" [ 1152.730052] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.739142] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.759209] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.759676] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.588s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.760024] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.588s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.760354] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.585s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.763297] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.589s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.788094] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.611s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.793887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "e061304c-998b-4331-b60d-809916844a6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.623s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.797858] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.625s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.844918] env[62914]: INFO nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] During sync_power_state the instance has a pending task (deleting). Skip. [ 1152.844918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.667s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.844918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.390s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.844918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "19f21caa-7d96-4526-bb12-768c4fe4d23e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.844918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.844918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.846828] env[62914]: INFO nova.compute.manager [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 
tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Terminating instance [ 1152.849269] env[62914]: DEBUG nova.compute.manager [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1152.849695] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61703424-1e22-4909-9e99-5943c9de53b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.860262] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f13721-f3be-4333-b0b4-d002d27fbc9a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.898707] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.899186] env[62914]: WARNING nova.virt.vmwareapi.driver [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 19f21caa-7d96-4526-bb12-768c4fe4d23e could not be found. [ 1152.899379] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1152.899738] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da1f9d59-1bc9-43e9-b5ef-6da9d87fdc4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.909293] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61b8793-116b-4dcb-9b5b-c9d4a2b44ae4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.946161] env[62914]: WARNING nova.virt.vmwareapi.vmops [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 19f21caa-7d96-4526-bb12-768c4fe4d23e could not be found. 
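Annotation: the warnings above ("Instance does not exists. Proceeding to delete instance properties on datastore" and "Instance does not exist on backend") show the destroy path treating a VM that is already gone on the backend as non-fatal so cleanup can continue. Below is a generic sketch of that error-tolerant teardown pattern; the exception class and helper functions are illustrative placeholders rather than the driver's actual API.

# Sketch of an error-tolerant destroy: a VM missing on the backend is a
# warning, not a failure, and cleanup continues (hypothetical helpers).
import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def _unregister_vm(instance_uuid: str) -> None:
    # Placeholder for the vCenter UnregisterVM call; pretend the VM is gone.
    raise InstanceNotFound("Instance %s could not be found." % instance_uuid)

def _delete_datastore_files(instance_uuid: str) -> None:
    LOG.info("Deleting the datastore files for %s", instance_uuid)

def destroy(instance_uuid: str) -> None:
    try:
        _unregister_vm(instance_uuid)
    except InstanceNotFound as exc:
        # Mirrors the log: warn and keep going so leftover artifacts are removed.
        LOG.warning("Instance does not exist on backend: %s", exc)
    _delete_datastore_files(instance_uuid)
    LOG.info("Instance destroyed")

logging.basicConfig(level=logging.INFO)
destroy("19f21caa-7d96-4526-bb12-768c4fe4d23e")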
[ 1152.947031] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1152.947031] env[62914]: INFO nova.compute.manager [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1152.947031] env[62914]: DEBUG oslo.service.loopingcall [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.947254] env[62914]: DEBUG nova.compute.manager [-] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1152.947363] env[62914]: DEBUG nova.network.neutron [-] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1152.987735] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.990594] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.542s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.020628] env[62914]: INFO nova.scheduler.client.report [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance 4d22f1d3-b37a-4356-a41c-516a19f78538 [ 1153.193329] env[62914]: DEBUG oslo_vmware.api [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162179} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.193485] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1153.193736] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1153.193918] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1153.194127] env[62914]: INFO nova.compute.manager [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1153.194422] env[62914]: DEBUG oslo.service.loopingcall [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.194856] env[62914]: DEBUG nova.compute.manager [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1153.194856] env[62914]: DEBUG nova.network.neutron [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1153.243788] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.486709] env[62914]: DEBUG nova.compute.manager [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1153.486904] env[62914]: DEBUG nova.compute.manager [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing instance network info cache due to event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1153.487525] env[62914]: DEBUG oslo_concurrency.lockutils [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.487711] env[62914]: DEBUG oslo_concurrency.lockutils [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.487893] env[62914]: DEBUG nova.network.neutron [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1153.497030] env[62914]: INFO nova.compute.claims [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1153.531098] env[62914]: DEBUG oslo_concurrency.lockutils [None req-901aa74d-a63b-4f7f-bc39-8d61f54f9565 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.834s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.531835] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.355s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.533142] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbedbc5f-8af3-4dcd-b43a-7e00743a60c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.544864] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f8cd72-558f-4576-87d9-6a3610406bd7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.744098] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.004354] env[62914]: INFO nova.compute.resource_tracker [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating resource usage from migration 2877ce41-ea0e-4f50-ba01-f1780ded0468 [ 1154.090793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "4d22f1d3-b37a-4356-a41c-516a19f78538" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.559s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.249867] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.255103] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc2c460-c998-4dfb-87e0-49037200242b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.265205] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed00f236-fb55-4bba-9046-eb9f455c071a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.303900] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51feeb04-10a4-4a3f-90b7-97f99daf1c35 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.308348] env[62914]: DEBUG nova.compute.manager [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1154.308567] env[62914]: DEBUG nova.compute.manager [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing instance network info cache due to event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1154.308844] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.308935] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.309111] env[62914]: DEBUG nova.network.neutron [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1154.321312] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc27c89-e980-4987-8cd1-3c63b6ffed71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.341378] env[62914]: DEBUG nova.compute.provider_tree [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.423906] env[62914]: DEBUG nova.network.neutron [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updated VIF entry in instance network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1154.424312] env[62914]: DEBUG nova.network.neutron [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap949a1716-cb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.491232] env[62914]: DEBUG nova.network.neutron [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.744403] env[62914]: DEBUG nova.network.neutron [-] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.745949] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.848246] env[62914]: DEBUG nova.scheduler.client.report [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1154.927630] env[62914]: DEBUG oslo_concurrency.lockutils [req-57b0de98-6a24-4bcf-8892-615e653d55c0 req-72939f73-d64f-4c62-a8c4-54d3e56fd0c8 service nova] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.994314] env[62914]: INFO nova.compute.manager [-] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Took 1.80 seconds to deallocate network for instance. [ 1155.141347] env[62914]: DEBUG nova.network.neutron [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updated VIF entry in instance network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1155.141672] env[62914]: DEBUG nova.network.neutron [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.247483] env[62914]: INFO nova.compute.manager [-] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Took 2.30 seconds to deallocate network for instance. 
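The MoveVirtualDisk_Task entries above ("progress is 21%/40%/60%/80%") come from a polling loop: the driver submits a long-running vCenter task and then repeatedly reads its state, logging progress until the task reports success or error. The following is a minimal, self-contained sketch of that polling pattern only; TaskInfo, fetch_task_info and the interval/timeout values are hypothetical stand-ins for illustration and are not the oslo.vmware implementation.

import time
from dataclasses import dataclass

# Hypothetical snapshot of a long-running task; real vCenter tasks expose
# similar fields (state, progress, error) through the vSphere API.
@dataclass
class TaskInfo:
    state: str        # "running", "success", or "error"
    progress: int     # 0-100
    error: str | None = None

def poll_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a long-running task until it finishes, logging progress.

    fetch_task_info is any callable returning a TaskInfo for task_id; it
    stands in for a vSphere property read and is an assumption here.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            print(f"Task {task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        # Mirrors the "progress is N%" lines emitted on every poll above.
        print(f"Task {task_id} progress is {info.progress}%.")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not finish within {timeout}s")
        time.sleep(interval)

# Example with a canned sequence of states standing in for vCenter polls.
_states = iter([TaskInfo("running", 21), TaskInfo("running", 60), TaskInfo("success", 100)])
poll_task(lambda task_id: next(_states), "task-4832777", interval=0.01)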
[ 1155.259239] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.364112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.373s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.364342] env[62914]: INFO nova.compute.manager [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Migrating [ 1155.382440] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.941s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.395169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "e061304c-998b-4331-b60d-809916844a6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.395169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "e061304c-998b-4331-b60d-809916844a6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.395271] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "e061304c-998b-4331-b60d-809916844a6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.398590] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "e061304c-998b-4331-b60d-809916844a6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.398590] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "e061304c-998b-4331-b60d-809916844a6f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.398590] env[62914]: INFO nova.compute.manager [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Terminating instance [ 1155.399501] env[62914]: DEBUG nova.compute.manager [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1155.399907] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1155.402719] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c80d4a8-159c-46cc-9c7a-de28bc813e60 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.412592] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1155.412861] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf91a031-4bac-4700-aaa2-a4de08b6d93d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.423733] env[62914]: DEBUG oslo_vmware.api [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1155.423733] env[62914]: value = "task-4832778" [ 1155.423733] env[62914]: _type = "Task" [ 1155.423733] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.432863] env[62914]: DEBUG oslo_vmware.api [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832778, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.508148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.602965] env[62914]: DEBUG nova.compute.manager [req-84b16caf-0a88-4999-a151-f117fbcaa43e req-bec8b0a6-d45b-468f-85f9-a9a166709e00 service nova] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Received event network-vif-deleted-3724af6c-0dc9-4056-9f6c-d5d8f85f195e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1155.644246] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.644524] env[62914]: DEBUG nova.compute.manager [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1155.644705] env[62914]: DEBUG nova.compute.manager [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing instance network info cache due to event network-changed-d3377942-1cea-43ef-8a80-ebe5519d491c. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1155.644975] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] Acquiring lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.645148] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] Acquired lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.645319] env[62914]: DEBUG nova.network.neutron [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Refreshing network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1155.650018] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c18b2b-3755-42a7-bd02-9a94afa3ef85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.657505] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.658202] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.663192] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc4a9da-3971-429d-8b16-862c98b153b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.694784] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e07012b-e256-46ac-8930-f31331ea7af1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.703387] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69c3eea-ad6e-4736-ba84-13518baf0894 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.718646] env[62914]: DEBUG nova.compute.provider_tree [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.745427] 
env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832777, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.55452} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.745735] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21/OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21.vmdk to [datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk. [ 1155.745935] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Cleaning up location [datastore1] OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1155.746130] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6340a0f3-c1f8-482b-a33a-666945132d21 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1155.746527] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d5a6b5c-ddd2-4245-829b-4277010cf5da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.748521] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.754284] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1155.754284] env[62914]: value = "task-4832779" [ 1155.754284] env[62914]: _type = "Task" [ 1155.754284] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.763156] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832779, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.820664] env[62914]: INFO nova.compute.manager [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Took 0.57 seconds to detach 1 volumes for instance. [ 1155.822970] env[62914]: DEBUG nova.compute.manager [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Deleting volume: d801e165-dc65-4457-9762-f209bc342e87 {{(pid=62914) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1155.895155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.895155] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.895155] env[62914]: DEBUG nova.network.neutron [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1155.937288] env[62914]: DEBUG oslo_vmware.api [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832778, 'name': PowerOffVM_Task, 'duration_secs': 0.21675} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.937636] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1155.937874] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1155.938424] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cae74cbb-6c16-468b-ab6d-545575b25cb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.019998] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1156.020353] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1156.020484] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleting the datastore file [datastore2] e061304c-998b-4331-b60d-809916844a6f {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.021923] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad58e3f8-cb1d-4e8f-b43e-399b7f8f63a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.031251] env[62914]: DEBUG oslo_vmware.api [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for the task: (returnval){ [ 1156.031251] env[62914]: value = "task-4832782" [ 1156.031251] env[62914]: _type = "Task" [ 1156.031251] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.040930] env[62914]: DEBUG oslo_vmware.api [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832782, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.165780] env[62914]: DEBUG nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1156.224392] env[62914]: DEBUG nova.scheduler.client.report [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1156.273885] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043582} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.274356] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.274639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.275041] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk to [datastore1] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1156.275436] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71dfec67-cb4d-4a17-b65b-955d8c4067a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.285711] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1156.285711] env[62914]: value = "task-4832783" [ 1156.285711] env[62914]: _type = "Task" [ 1156.285711] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.298389] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.317203] env[62914]: DEBUG nova.compute.manager [req-4d451360-a6ef-4a82-9e0c-5100c578f0a8 req-f8497367-15dd-4e5c-9908-039c846f17d5 service nova] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] Received event network-vif-deleted-689aba7f-31af-4116-8b4e-bcec10c9c5ba {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1156.383925] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.471116] env[62914]: DEBUG nova.network.neutron [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updated VIF entry in instance network info cache for port d3377942-1cea-43ef-8a80-ebe5519d491c. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1156.471334] env[62914]: DEBUG nova.network.neutron [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [{"id": "d3377942-1cea-43ef-8a80-ebe5519d491c", "address": "fa:16:3e:64:e3:92", "network": {"id": "ac1e1da4-227c-4355-9cbf-66b09fd46d5c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-613387998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d77829ac81cd41f2a4acdd571295ca6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3377942-1c", "ovs_interfaceid": "d3377942-1cea-43ef-8a80-ebe5519d491c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.540058] env[62914]: DEBUG oslo_vmware.api [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Task: {'id': task-4832782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167978} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.542978] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.542978] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1156.543207] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1156.543394] env[62914]: INFO nova.compute.manager [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] [instance: e061304c-998b-4331-b60d-809916844a6f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1156.543700] env[62914]: DEBUG oslo.service.loopingcall [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.543918] env[62914]: DEBUG nova.compute.manager [-] [instance: e061304c-998b-4331-b60d-809916844a6f] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1156.544036] env[62914]: DEBUG nova.network.neutron [-] [instance: e061304c-998b-4331-b60d-809916844a6f] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1156.705355] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.734233] env[62914]: DEBUG oslo_concurrency.lockutils [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.349s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.734233] env[62914]: INFO nova.compute.manager [None req-30a26262-130e-41c3-a76f-20e0096ae247 tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] [instance: 19f21caa-7d96-4526-bb12-768c4fe4d23e] Successfully reverted task state from rebuilding on failure for instance. 
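The lockutils entries above record, for each named lock, how long the caller waited to acquire it and how long it was held (for example "waited 3.985s" and "held 1.349s" around the compute_resources lock). The sketch below reproduces only that waited/held accounting with a plain threading.Lock registry; named_lock, _get_lock and the example owner string are illustrative assumptions, not the oslo.concurrency implementation.

import threading
import time
from contextlib import contextmanager

_registry = {}                      # lock name -> threading.Lock
_registry_guard = threading.Lock()  # protects creation of per-name locks

def _get_lock(name):
    # One lock object per lock name, created on first use.
    with _registry_guard:
        return _registry.setdefault(name, threading.Lock())

@contextmanager
def named_lock(name, owner):
    """Acquire the lock for `name`, logging waited/held durations."""
    wait_start = time.monotonic()
    lock = _get_lock(name)
    lock.acquire()
    waited = time.monotonic() - wait_start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - held_start
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Example: serializing resource-tracker style updates on one node.
if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.1)  # stand-in for claim/usage bookkeeping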
[ 1156.744837] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.985s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.744837] env[62914]: DEBUG nova.objects.instance [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'resources' on Instance uuid b285198b-aa95-4dcb-99b3-531d09c210d0 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1156.801579] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832783, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.825112] env[62914]: DEBUG nova.network.neutron [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [{"id": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "address": "fa:16:3e:d3:1d:70", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6def1a-05", "ovs_interfaceid": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.974866] env[62914]: DEBUG oslo_concurrency.lockutils [req-7bb45055-021f-4df9-b5b2-3069b2c61907 req-267fbf6f-4927-4189-bfd7-6a4299d80edd service nova] Releasing lock "refresh_cache-f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.299711] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832783, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.327740] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.469675] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a74c49-3fd2-4a8f-af2d-a49d51dd6362 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.479412] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34f818d-6cb6-44cb-bc34-63a8471f52da {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.512862] env[62914]: DEBUG nova.network.neutron [-] [instance: e061304c-998b-4331-b60d-809916844a6f] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.515161] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997e7abf-ddd0-4cb6-8dca-b542f103de11 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.524521] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76367b0f-fd90-4475-bcb3-3bb3a0fb1cec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.542204] env[62914]: DEBUG nova.compute.provider_tree [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.799194] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832783, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.019673] env[62914]: INFO nova.compute.manager [-] [instance: e061304c-998b-4331-b60d-809916844a6f] Took 1.48 seconds to deallocate network for instance. 
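The external-event entries above all follow one pattern: Neutron reports network-changed-<port>, the compute service takes a refresh_cache-<instance> lock, re-queries the port, and rewrites that VIF's entry in the instance's cached network_info. The class below is a compact sketch of that cache-refresh flow under a per-instance lock; NetworkInfoCache, handle_network_changed and query_port are hypothetical names for illustration and do not reflect Nova's actual neutron module or data model.

import threading

class NetworkInfoCache:
    """Per-instance cache of VIF dictionaries keyed by port id (illustrative only)."""

    def __init__(self):
        self._cache = {}                 # instance_uuid -> {port_id: vif}
        self._locks = {}                 # instance_uuid -> threading.Lock
        self._guard = threading.Lock()   # protects lock creation

    def _lock_for(self, instance_uuid):
        # One lock per instance, analogous to the refresh_cache-<uuid> locks above.
        with self._guard:
            return self._locks.setdefault(instance_uuid, threading.Lock())

    def handle_network_changed(self, instance_uuid, port_id, query_port):
        """Refresh one VIF entry after a network-changed event.

        query_port is any callable returning the current port/VIF dict for
        port_id; it stands in for a Neutron API call and is an assumption.
        """
        with self._lock_for(instance_uuid):
            vif = query_port(port_id)
            self._cache.setdefault(instance_uuid, {})[port_id] = vif
            print(f"[instance: {instance_uuid}] Updated VIF entry for port {port_id}")

    def network_info(self, instance_uuid):
        # Returns the cached network_info list, as logged after each refresh.
        return list(self._cache.get(instance_uuid, {}).values())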
[ 1158.046098] env[62914]: DEBUG nova.scheduler.client.report [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1158.300308] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832783, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.343551] env[62914]: DEBUG nova.compute.manager [req-aef962b8-cd6e-4742-95f6-4fae97ded612 req-e93213f9-822e-4e93-8b8a-8e8be3eb578f service nova] [instance: e061304c-998b-4331-b60d-809916844a6f] Received event network-vif-deleted-2de06f63-3449-4e6e-af95-5835f882045b {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1158.527513] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.551988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.808s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.554602] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.656s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.555260] env[62914]: DEBUG nova.objects.instance [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'resources' on Instance uuid 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.580834] env[62914]: INFO nova.scheduler.client.report [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted allocations for instance b285198b-aa95-4dcb-99b3-531d09c210d0 [ 1158.798916] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 
tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832783, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.491783} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.799640] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/598263a1-d518-4327-a7bf-74fedbc43cae/598263a1-d518-4327-a7bf-74fedbc43cae.vmdk to [datastore1] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1158.800539] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b882ddf-f528-408a-9671-3c5fff91cbb9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.826391] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1158.826826] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-772b972f-3cb7-433f-accd-078852d34a40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.846945] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569c66da-4313-4193-a0e6-34e3929f9f57 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.852150] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1158.852150] env[62914]: value = "task-4832784" [ 1158.852150] env[62914]: _type = "Task" [ 1158.852150] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.863772] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832784, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.881134] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1159.060973] env[62914]: DEBUG nova.objects.instance [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'numa_topology' on Instance uuid 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.089183] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7b65818c-84c7-49f4-9fee-40a4190b24c8 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.370s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.090680] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.916s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.090889] env[62914]: INFO nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: b285198b-aa95-4dcb-99b3-531d09c210d0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1159.091081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "b285198b-aa95-4dcb-99b3-531d09c210d0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.364048] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832784, 'name': ReconfigVM_Task, 'duration_secs': 0.420649} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.364373] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Reconfigured VM instance instance-00000066 to attach disk [datastore1] e730b472-fca8-4041-a00c-91bee25232f7/e730b472-fca8-4041-a00c-91bee25232f7.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.364868] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbdcb663-74c6-46b9-9637-4eb2f5f364be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.371308] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1159.371308] env[62914]: value = "task-4832785" [ 1159.371308] env[62914]: _type = "Task" [ 1159.371308] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.379927] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832785, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.391911] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1159.392265] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44c580a1-5fa0-4302-8164-c9a807cea261 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.400041] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1159.400041] env[62914]: value = "task-4832786" [ 1159.400041] env[62914]: _type = "Task" [ 1159.400041] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.409357] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.563580] env[62914]: DEBUG nova.objects.base [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Object Instance<88acf376-122d-4796-8400-dfc4c7ec45d7> lazy-loaded attributes: resources,numa_topology {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1159.762852] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d3ca0e-2fe6-4f19-8fc0-ca14916ee6e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.771056] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4417679d-0214-420a-b8e5-7511dcf2d108 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.808269] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a9f4e9-e96c-4f42-8ab5-592ca13668db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.817379] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2282e49a-8f5a-4041-a291-31c87ce505d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.833099] env[62914]: DEBUG nova.compute.provider_tree [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.882851] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832785, 'name': Rename_Task, 'duration_secs': 0.225631} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.883212] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1159.883496] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91d5c147-a84a-4629-9d1f-908a5139213a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.892737] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1159.892737] env[62914]: value = "task-4832787" [ 1159.892737] env[62914]: _type = "Task" [ 1159.892737] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.902067] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.910107] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832786, 'name': PowerOffVM_Task, 'duration_secs': 0.237442} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.910410] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1159.910620] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1160.337393] env[62914]: DEBUG nova.scheduler.client.report [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1160.403326] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832787, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.417639] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1160.417888] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1160.418063] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1160.418265] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1160.418420] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1160.418576] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1160.418788] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1160.419037] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1160.419241] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies 
{{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1160.419416] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1160.419598] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1160.425219] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f27d2168-4223-416b-af82-242601c2245a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.442711] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1160.442711] env[62914]: value = "task-4832788" [ 1160.442711] env[62914]: _type = "Task" [ 1160.442711] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.842913] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.288s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.845549] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.338s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.845807] env[62914]: DEBUG nova.objects.instance [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'resources' on Instance uuid 3b26b5d7-524a-41af-ab75-a158568e031e {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.904031] env[62914]: DEBUG oslo_vmware.api [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832787, 'name': PowerOnVM_Task, 'duration_secs': 0.772448} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.904369] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1160.960022] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832788, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.962783] env[62914]: DEBUG nova.compute.manager [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1161.018727] env[62914]: DEBUG nova.compute.manager [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1161.019741] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd1ac54-1634-4abf-8fd9-d47cf12e2b64 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.354452] env[62914]: DEBUG oslo_concurrency.lockutils [None req-6c543a8b-444a-4b58-aa52-459c2a59bb46 tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.718s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.355424] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.180s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.355629] env[62914]: INFO nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] During sync_power_state the instance has a pending task (shelving_offloading). Skip. 
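The paired "Lock ... acquired by ... waited 9.180s" and ""released" ... held 0.001s" entries above come from oslo_concurrency.lockutils, which wraps each critical section in a named lock and reports how long the caller waited for the lock and how long it held it. A minimal sketch of that pattern, assuming oslo.concurrency is installed and using a made-up instance UUID as the lock name; the timing bookkeeping here is approximate and purely illustrative, not Nova's actual code:

    import time

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        start = time.monotonic()
        # lockutils.lock() is a context manager; the real wrapper inside lockutils
        # emits the "acquired ... waited" / "released ... held" lines seen above.
        with lockutils.lock(instance_uuid):
            waited = time.monotonic() - start
            work_start = time.monotonic()
            # ... power off the VM, destroy it, free allocations ...
            held = time.monotonic() - work_start  # approximate: measured just before release
        print("lock %s: waited %.3fs, held %.3fs" % (instance_uuid, waited, held))

Using the instance UUID as the lock name is what serializes the terminate, sync_power_states, shelve and unshelve paths against each other in the entries above: whichever request acquires the name first does its work, and the others report the time they spent waiting.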
[ 1161.355814] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.356243] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.608s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.356420] env[62914]: INFO nova.compute.manager [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Unshelving [ 1161.456160] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832788, 'name': ReconfigVM_Task, 'duration_secs': 0.625522} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.458691] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1161.479039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.529338] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbccf029-f3de-4a7c-9b73-e03cabc1dfa6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.541388] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3e227a35-2a45-4a65-8689-841491e60405 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.020s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.543010] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db198353-c7d9-422c-9c23-e4d93460a043 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.546529] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "e730b472-fca8-4041-a00c-91bee25232f7" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.372s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.546733] env[62914]: INFO nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: e730b472-fca8-4041-a00c-91bee25232f7] During sync_power_state the instance has a pending task (spawning). Skip. [ 1161.546918] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "e730b472-fca8-4041-a00c-91bee25232f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.577548] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cc43e6-24eb-40e9-abd2-f39b8a8a5df1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.586456] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1bd913-a631-44fc-b942-8c5c377394c5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.601076] env[62914]: DEBUG nova.compute.provider_tree [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.965480] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1161.965731] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1161.965957] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1161.966192] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1161.966348] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1161.966665] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1161.966704] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1161.967117] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1161.967117] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1161.967358] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1161.967465] env[62914]: DEBUG nova.virt.hardware [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1161.972812] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Reconfiguring VM instance instance-00000075 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1161.973128] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a315476a-1193-4328-8f8d-28a1863cdd20 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.993936] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1161.993936] env[62914]: value = "task-4832789" [ 1161.993936] env[62914]: _type = "Task" [ 1161.993936] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.002654] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832789, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.104085] env[62914]: DEBUG nova.scheduler.client.report [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1162.382296] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.504674] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832789, 'name': ReconfigVM_Task, 'duration_secs': 0.214688} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.505032] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Reconfigured VM instance instance-00000075 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1162.505750] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e200d0-90d5-4bef-8475-980b30cbef68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.528206] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11/9673614c-44c9-4348-b528-0bd28c892a11.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1162.528511] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6278a6d1-243f-4963-8b85-5684a433fc2e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.546900] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1162.546900] env[62914]: value = "task-4832790" [ 1162.546900] env[62914]: _type = "Task" [ 1162.546900] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.555077] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832790, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.609219] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.611642] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.228s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.611908] env[62914]: DEBUG nova.objects.instance [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lazy-loading 'resources' on Instance uuid 19f21caa-7d96-4526-bb12-768c4fe4d23e {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.630944] env[62914]: INFO nova.scheduler.client.report [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted allocations for instance 3b26b5d7-524a-41af-ab75-a158568e031e [ 1163.060499] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832790, 'name': ReconfigVM_Task, 'duration_secs': 0.405751} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.060993] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11/9673614c-44c9-4348-b528-0bd28c892a11.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.061357] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1163.138688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0955b9c2-42c0-4100-ac03-1440136bf13f tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.082s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.139705] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.966s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.139940] env[62914]: INFO nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 3b26b5d7-524a-41af-ab75-a158568e031e] During sync_power_state the instance has a pending task (deleting). Skip. 
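The recurring "Waiting for the task: (returnval){ value = "task-48327xx" ... }" and "Task: {'id': task-..., 'name': ReconfigVM_Task} progress is ..%" entries reflect the standard vCenter pattern: each *_Task invocation (ReconfigVM_Task, PowerOffVM_Task, PowerOnVM_Task, Rename_Task) returns a Task managed object that the caller polls until it reaches a terminal state. A schematic poll loop under that assumption; this is not oslo.vmware's implementation, and get_task_info is a hypothetical stand-in for reading the Task's info property via the PropertyCollector:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter *_Task until it reaches a terminal state."""
        while True:
            info = get_task_info()            # e.g. {'state': 'running', 'progress': 89}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(poll_interval)         # queued/running: poll again

In the log this loop surfaces as the "_poll_task ... progress is 0%" (then 5%, 89%, 99%) lines, followed by "completed successfully" with a duration_secs once the task state flips to success.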
[ 1163.140183] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "3b26b5d7-524a-41af-ab75-a158568e031e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.301533] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a89178-89e1-4660-b9c0-01b4a02a6b58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.310820] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d750005-9d1d-4722-8452-8704521d0dd4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.346312] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f71e75-4b88-48b5-9547-546e592126aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.360090] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40876d21-a950-471c-8672-5737726ff4fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.374126] env[62914]: DEBUG nova.compute.provider_tree [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.569086] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4ecb42-5a27-490a-a63b-5aa4b0eec346 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.591727] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794a1ec3-ad56-4347-a966-371d96f89193 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.610870] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1163.877671] env[62914]: DEBUG nova.scheduler.client.report [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1164.159822] env[62914]: DEBUG nova.network.neutron [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Port 2c6def1a-051c-4671-bee1-4eeefcd24ae3 binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1164.383488] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.772s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.385952] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.681s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.387741] env[62914]: INFO nova.compute.claims [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1164.543929] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.544178] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.903184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2a85bb2b-9b7a-4418-b53d-c25f0cd1287f tempest-ServerActionsV293TestJSON-301243714 tempest-ServerActionsV293TestJSON-301243714-project-member] Lock "19f21caa-7d96-4526-bb12-768c4fe4d23e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.060s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.046679] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Starting instance... 
{{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1165.186014] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.186206] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.186391] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.564140] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.568088] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb27e5ed-fe78-4770-9c3a-e82c7dda3899 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.576474] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed3a7d5-80db-4bac-8f72-915d93e3a1bb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.606934] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a8699d-ed06-4596-aae8-a903908a5d1c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.614777] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa2eca0-e6ef-41eb-b6a1-dcdf66621165 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.629054] env[62914]: DEBUG nova.compute.provider_tree [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.132223] env[62914]: DEBUG nova.scheduler.client.report [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 
tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1166.220107] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.220322] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.220509] env[62914]: DEBUG nova.network.neutron [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1166.637997] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.638482] env[62914]: DEBUG nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1166.643221] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.116s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.643548] env[62914]: DEBUG nova.objects.instance [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lazy-loading 'resources' on Instance uuid e061304c-998b-4331-b60d-809916844a6f {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.934284] env[62914]: DEBUG nova.network.neutron [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [{"id": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "address": "fa:16:3e:d3:1d:70", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6def1a-05", "ovs_interfaceid": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.150379] env[62914]: DEBUG nova.compute.utils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1167.151913] env[62914]: DEBUG nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1167.152105] env[62914]: DEBUG nova.network.neutron [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1167.195761] env[62914]: DEBUG nova.policy [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ea29d6698d4734a5def35fe065fe21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b59bf6daf8c246f7b034dc0adcfc8cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1167.342496] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c74f69-a9e7-44b1-9134-b9a069a45ead {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.350805] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65e45d4-5c9e-4b61-b4f3-a8122fd29244 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.386653] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115c8daf-58cb-4292-8046-3b7e187b8f4d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.394879] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00484d3-c6e1-40bc-904d-c3163cbd3014 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.408645] env[62914]: DEBUG nova.compute.provider_tree [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.439410] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.501482] env[62914]: DEBUG nova.network.neutron [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Successfully created port: 38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1167.655510] env[62914]: DEBUG nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b 
tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1167.912035] env[62914]: DEBUG nova.scheduler.client.report [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1167.966557] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f811e3-90d7-48ed-b4dd-30a5a5887458 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.986224] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6a1774-b530-4a6a-adf7-af89dfa16cb8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.994172] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1168.417628] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.420895] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.942s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.446239] env[62914]: INFO nova.scheduler.client.report [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Deleted allocations for instance e061304c-998b-4331-b60d-809916844a6f [ 1168.500818] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1168.501909] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with 
opID=oslo.vmware-f97b3397-d818-4d01-808c-b7a8369f3119 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.510344] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1168.510344] env[62914]: value = "task-4832791" [ 1168.510344] env[62914]: _type = "Task" [ 1168.510344] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.519561] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832791, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.667013] env[62914]: DEBUG nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1168.760186] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1168.760446] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1168.760611] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.760799] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1168.760952] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 
tempest-AttachVolumeNegativeTest-515169220-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.761209] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1168.761611] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1168.762023] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1168.762780] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1168.763047] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1168.763309] env[62914]: DEBUG nova.virt.hardware [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1168.764337] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c75dfa7-518d-4418-ba37-f080dabe4cda {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.773600] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf0faa1-6b82-42c4-adca-36564de63cb9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.927112] env[62914]: INFO nova.compute.claims [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1168.954262] env[62914]: DEBUG oslo_concurrency.lockutils [None req-13eea630-255a-41d6-8d80-c56eb91e7230 tempest-ServersTestJSON-1645965215 tempest-ServersTestJSON-1645965215-project-member] Lock "e061304c-998b-4331-b60d-809916844a6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.559s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.021751] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832791, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.024223] env[62914]: DEBUG nova.compute.manager [req-a0dd5f63-bcfa-4ea8-99a7-92da69753b01 req-60830e32-3b0e-4e52-a6ab-1651c024f999 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Received event network-vif-plugged-38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1169.024454] env[62914]: DEBUG oslo_concurrency.lockutils [req-a0dd5f63-bcfa-4ea8-99a7-92da69753b01 req-60830e32-3b0e-4e52-a6ab-1651c024f999 service nova] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.024670] env[62914]: DEBUG oslo_concurrency.lockutils [req-a0dd5f63-bcfa-4ea8-99a7-92da69753b01 req-60830e32-3b0e-4e52-a6ab-1651c024f999 service nova] Lock "cad97698-d68d-42de-a4de-772917e60374-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.024962] env[62914]: DEBUG oslo_concurrency.lockutils [req-a0dd5f63-bcfa-4ea8-99a7-92da69753b01 req-60830e32-3b0e-4e52-a6ab-1651c024f999 service nova] Lock "cad97698-d68d-42de-a4de-772917e60374-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.025217] env[62914]: DEBUG nova.compute.manager [req-a0dd5f63-bcfa-4ea8-99a7-92da69753b01 req-60830e32-3b0e-4e52-a6ab-1651c024f999 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] No waiting events found dispatching network-vif-plugged-38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1169.025491] env[62914]: WARNING nova.compute.manager [req-a0dd5f63-bcfa-4ea8-99a7-92da69753b01 req-60830e32-3b0e-4e52-a6ab-1651c024f999 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Received unexpected event network-vif-plugged-38dca680-426a-4e56-834d-e95b4f9d439f for instance with vm_state building and task_state spawning. 
[ 1169.233143] env[62914]: DEBUG nova.network.neutron [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Successfully updated port: 38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1169.435024] env[62914]: INFO nova.compute.resource_tracker [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating resource usage from migration 7cda217d-33cb-4559-bebb-00862b606af8 [ 1169.522143] env[62914]: DEBUG oslo_vmware.api [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832791, 'name': PowerOnVM_Task, 'duration_secs': 0.588386} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.524962] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1169.525356] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7f90bdc9-4bcc-4029-8f24-8adba0d01bb5 tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance '9673614c-44c9-4348-b528-0bd28c892a11' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1169.641890] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4089f4ef-4fe4-4c98-b052-e45ba39bdc21 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.653024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad62dbb-988c-4558-a5ad-c25f56c0bc68 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.700948] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b9a180-686a-4ba4-9830-da6644fc6253 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.711808] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f527878-80bb-410c-a548-0253cea9e8a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.735700] env[62914]: DEBUG nova.compute.provider_tree [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.737639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 
tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.737842] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.738143] env[62914]: DEBUG nova.network.neutron [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1170.241031] env[62914]: DEBUG nova.scheduler.client.report [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1170.290143] env[62914]: DEBUG nova.network.neutron [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1170.524027] env[62914]: DEBUG nova.network.neutron [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Updating instance_info_cache with network_info: [{"id": "38dca680-426a-4e56-834d-e95b4f9d439f", "address": "fa:16:3e:f0:f7:5b", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38dca680-42", "ovs_interfaceid": "38dca680-426a-4e56-834d-e95b4f9d439f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.747863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.326s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.748740] env[62914]: INFO nova.compute.manager [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Migrating [ 1170.756526] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.374s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.756761] env[62914]: DEBUG nova.objects.instance [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'pci_requests' on Instance uuid 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.027226] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.027419] env[62914]: DEBUG 
nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Instance network_info: |[{"id": "38dca680-426a-4e56-834d-e95b4f9d439f", "address": "fa:16:3e:f0:f7:5b", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38dca680-42", "ovs_interfaceid": "38dca680-426a-4e56-834d-e95b4f9d439f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1171.028275] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:f7:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38dca680-426a-4e56-834d-e95b4f9d439f', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1171.041995] env[62914]: DEBUG oslo.service.loopingcall [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1171.048549] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cad97698-d68d-42de-a4de-772917e60374] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1171.052347] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a529a9e9-a59d-42c9-90a2-8e99b288e484 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.072915] env[62914]: DEBUG nova.compute.manager [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Received event network-changed-38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1171.073590] env[62914]: DEBUG nova.compute.manager [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Refreshing instance network info cache due to event network-changed-38dca680-426a-4e56-834d-e95b4f9d439f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1171.073767] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] Acquiring lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.073963] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] Acquired lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.074201] env[62914]: DEBUG nova.network.neutron [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Refreshing network info cache for port 38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1171.082590] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1171.082590] env[62914]: value = "task-4832792" [ 1171.082590] env[62914]: _type = "Task" [ 1171.082590] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.093965] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832792, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.268740] env[62914]: DEBUG nova.objects.instance [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'numa_topology' on Instance uuid 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.269923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.270115] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.270301] env[62914]: DEBUG nova.network.neutron [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1171.579094] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.579415] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.579616] env[62914]: DEBUG nova.compute.manager [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Going to confirm migration 7 {{(pid=62914) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1171.595636] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832792, 'name': CreateVM_Task, 'duration_secs': 0.332599} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.595866] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cad97698-d68d-42de-a4de-772917e60374] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1171.596592] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.596841] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.597639] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1171.597639] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdbd7744-87bf-4e16-a319-c02f1086b3ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.603049] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1171.603049] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52935049-593a-19b9-c0a8-b1c102849915" [ 1171.603049] env[62914]: _type = "Task" [ 1171.603049] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.614478] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52935049-593a-19b9-c0a8-b1c102849915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.772974] env[62914]: INFO nova.compute.claims [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1171.862505] env[62914]: DEBUG nova.network.neutron [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Updated VIF entry in instance network info cache for port 38dca680-426a-4e56-834d-e95b4f9d439f. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1171.862901] env[62914]: DEBUG nova.network.neutron [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Updating instance_info_cache with network_info: [{"id": "38dca680-426a-4e56-834d-e95b4f9d439f", "address": "fa:16:3e:f0:f7:5b", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38dca680-42", "ovs_interfaceid": "38dca680-426a-4e56-834d-e95b4f9d439f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.118570] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52935049-593a-19b9-c0a8-b1c102849915, 'name': SearchDatastore_Task, 'duration_secs': 0.026028} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.119253] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.120245] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.120245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.120245] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.120861] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.121238] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b49c19a-1230-4e94-84e5-eda7be4e4785 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.131980] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.132622] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1172.135379] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5471950-62b7-4091-b000-f2ce0f1ba382 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.141843] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1172.141843] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf0a7c-a59b-21e1-ffed-fb19bca5fd27" [ 1172.141843] env[62914]: _type = "Task" [ 1172.141843] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.147587] env[62914]: DEBUG nova.network.neutron [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.155666] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bf0a7c-a59b-21e1-ffed-fb19bca5fd27, 'name': SearchDatastore_Task, 'duration_secs': 0.010449} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.157146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.157489] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquired lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.157929] env[62914]: DEBUG nova.network.neutron [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1172.159068] env[62914]: DEBUG nova.objects.instance [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'info_cache' on Instance uuid 9673614c-44c9-4348-b528-0bd28c892a11 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.161253] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b511a0-aa19-4642-837b-071a5b9fa616 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.169039] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1172.169039] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a35a3-1019-7025-fadc-73374211985a" [ 1172.169039] env[62914]: _type = "Task" [ 1172.169039] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.179354] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a35a3-1019-7025-fadc-73374211985a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.366912] env[62914]: DEBUG oslo_concurrency.lockutils [req-1ab7a308-fdc6-4ae2-bf6a-1b1b69070939 req-c81d3d78-c852-4181-9bdf-ffad8a78f1f4 service nova] Releasing lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.650591] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.680260] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]524a35a3-1019-7025-fadc-73374211985a, 'name': SearchDatastore_Task, 'duration_secs': 0.028359} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.680551] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.680811] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] cad97698-d68d-42de-a4de-772917e60374/cad97698-d68d-42de-a4de-772917e60374.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1172.681326] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cb25c79-e319-46b5-a0b1-4e3fcf6f16d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.689242] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1172.689242] env[62914]: value = "task-4832793" [ 1172.689242] env[62914]: _type = "Task" [ 1172.689242] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.698969] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832793, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.979968] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3097ec62-3a4d-4601-b06a-98df082d4e6b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.988535] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68190823-9c73-4b45-b8f1-bbcc6e2f2de7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.024034] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d30fdda-9b35-4e78-a5c2-46d8cb2b5142 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.034496] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9116ae8c-0aaa-4bb2-b2e5-c57eb2e13c79 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.049241] env[62914]: DEBUG nova.compute.provider_tree [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.201354] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499206} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.202163] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] cad97698-d68d-42de-a4de-772917e60374/cad97698-d68d-42de-a4de-772917e60374.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1173.202163] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1173.203024] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90356323-3342-405f-9d8d-7bc4f56b2085 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.213051] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1173.213051] env[62914]: value = "task-4832794" [ 1173.213051] env[62914]: _type = "Task" [ 1173.213051] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.223777] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.390264] env[62914]: DEBUG nova.network.neutron [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [{"id": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "address": "fa:16:3e:d3:1d:70", "network": {"id": "66a834d1-e7f7-4c84-9280-9c7d12a2ebd4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-894185026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4860bec4a28e4289b7a508f007fff452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6def1a-05", "ovs_interfaceid": "2c6def1a-051c-4671-bee1-4eeefcd24ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.552722] env[62914]: DEBUG nova.scheduler.client.report [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.724828] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097005} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.725214] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.725795] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7221cd62-a512-4708-b84d-f28d0fe3037f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.747676] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] cad97698-d68d-42de-a4de-772917e60374/cad97698-d68d-42de-a4de-772917e60374.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.747978] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc6c69a4-1247-4c38-ade8-27e233e9ef1d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.768561] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1173.768561] env[62914]: value = "task-4832795" [ 1173.768561] env[62914]: _type = "Task" [ 1173.768561] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.777299] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832795, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.893073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Releasing lock "refresh_cache-9673614c-44c9-4348-b528-0bd28c892a11" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1173.893397] env[62914]: DEBUG nova.objects.instance [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'migration_context' on Instance uuid 9673614c-44c9-4348-b528-0bd28c892a11 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.057800] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.301s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.060148] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.496s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.061709] env[62914]: INFO nova.compute.claims [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1174.090191] env[62914]: INFO nova.network.neutron [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating port 949a1716-cbb0-44a7-a0f6-4d27a45071e0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1174.169030] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb7e089-4c2d-4b4e-ba09-cadb1246b396 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.188692] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1174.278628] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832795, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.396064] env[62914]: DEBUG nova.objects.base [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Object Instance<9673614c-44c9-4348-b528-0bd28c892a11> lazy-loaded attributes: info_cache,migration_context {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1174.397131] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c695fa2-1033-451a-8059-8b5f866b4b91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.417296] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-769e39ef-d8e7-406e-ab04-85cc7da5c3c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.423976] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1174.423976] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f63324-e885-768e-de3a-ceb0f2965018" [ 1174.423976] env[62914]: _type = "Task" [ 1174.423976] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.431830] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f63324-e885-768e-de3a-ceb0f2965018, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.695060] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1174.695321] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d45125ec-1814-4669-a8ae-19055d2e4503 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.704407] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1174.704407] env[62914]: value = "task-4832796" [ 1174.704407] env[62914]: _type = "Task" [ 1174.704407] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.712354] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832796, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.780974] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832795, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.935877] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52f63324-e885-768e-de3a-ceb0f2965018, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.936218] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.115948] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.217663] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832796, 'name': PowerOffVM_Task, 'duration_secs': 0.503063} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.217966] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1175.218176] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1175.257719] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e77ea34-af0f-42d5-8645-f143a3b698d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.266314] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed861f4-b601-45df-94cb-b97534140f49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.301243] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f93eb8-5231-422c-b0c1-bfeef184bece {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.308021] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832795, 'name': ReconfigVM_Task, 'duration_secs': 1.29724} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.308021] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Reconfigured VM instance instance-00000076 to attach disk [datastore2] cad97698-d68d-42de-a4de-772917e60374/cad97698-d68d-42de-a4de-772917e60374.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1175.308709] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c9146b1-8919-497d-bd93-9bc86d4d3f83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.313722] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943d5cfc-1b9c-49cb-9b7c-cf835106c412 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.319402] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1175.319402] env[62914]: value = "task-4832797" [ 1175.319402] env[62914]: _type = "Task" [ 1175.319402] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.330740] env[62914]: DEBUG nova.compute.provider_tree [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.338617] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832797, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.530621] env[62914]: DEBUG nova.compute.manager [req-5b46c6c9-4dc6-4ce7-ad32-12455afb8ce2 req-20c16472-a076-4587-b727-6db8f09f341c service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-vif-plugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1175.530999] env[62914]: DEBUG oslo_concurrency.lockutils [req-5b46c6c9-4dc6-4ce7-ad32-12455afb8ce2 req-20c16472-a076-4587-b727-6db8f09f341c service nova] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.531305] env[62914]: DEBUG oslo_concurrency.lockutils [req-5b46c6c9-4dc6-4ce7-ad32-12455afb8ce2 req-20c16472-a076-4587-b727-6db8f09f341c service nova] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.531574] env[62914]: DEBUG oslo_concurrency.lockutils [req-5b46c6c9-4dc6-4ce7-ad32-12455afb8ce2 req-20c16472-a076-4587-b727-6db8f09f341c service nova] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.531841] env[62914]: DEBUG nova.compute.manager [req-5b46c6c9-4dc6-4ce7-ad32-12455afb8ce2 req-20c16472-a076-4587-b727-6db8f09f341c service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] No waiting events found dispatching network-vif-plugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1175.532122] env[62914]: WARNING nova.compute.manager [req-5b46c6c9-4dc6-4ce7-ad32-12455afb8ce2 req-20c16472-a076-4587-b727-6db8f09f341c service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received unexpected event network-vif-plugged-949a1716-cbb0-44a7-a0f6-4d27a45071e0 for instance with vm_state shelved_offloaded and task_state spawning. 
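The "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" DEBUG lines above (for "compute_resources", "refresh_cache-<uuid>" and "<uuid>-events") are emitted by oslo.concurrency. A minimal sketch of the two lockutils patterns that produce such lines follows; it is not Nova's actual code, and the lock name and instance UUID are taken from the log purely as illustrative values.

```python
# Sketch only: the oslo.concurrency idioms behind the lock DEBUG lines above.
from oslo_concurrency import lockutils

def claim_resources(instance_uuid):
    # Context-manager form: corresponds to the plain
    # 'Acquiring lock "..."' / 'Acquired lock' / 'Releasing lock' lines
    # logged from lockutils.lock().
    with lockutils.lock("compute_resources"):
        # critical section: resource-tracker bookkeeping would go here
        pass

@lockutils.synchronized("compute_resources")
def drop_move_claim(instance_uuid):
    # Decorator form: corresponds to the
    # 'Lock "..." acquired by "..." :: waited N.NNNs' /
    # '"released" by "..." :: held N.NNNs' lines logged from the
    # synchronized() wrapper.
    pass

if __name__ == "__main__":
    claim_resources("9673614c-44c9-4348-b528-0bd28c892a11")
    drop_move_claim("9673614c-44c9-4348-b528-0bd28c892a11")
```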
[ 1175.567606] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.567787] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 1175.625106] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.625346] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.625496] env[62914]: DEBUG nova.network.neutron [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1175.726642] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1175.726824] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1175.727056] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.727383] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1175.727505] env[62914]: DEBUG nova.virt.hardware 
[None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.727696] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1175.727932] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1175.728927] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1175.728927] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1175.728927] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1175.728927] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1175.734218] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30388940-95a5-451e-b02f-4f81865ecc12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.749696] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1175.749696] env[62914]: value = "task-4832798" [ 1175.749696] env[62914]: _type = "Task" [ 1175.749696] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.758216] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832798, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.829916] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832797, 'name': Rename_Task, 'duration_secs': 0.13964} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.830395] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1175.830470] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb26de9e-b447-41ab-bc74-74fa83a83974 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.834401] env[62914]: DEBUG nova.scheduler.client.report [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1175.839317] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1175.839317] env[62914]: value = "task-4832799" [ 1175.839317] env[62914]: _type = "Task" [ 1175.839317] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.848948] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832799, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.071459] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.071627] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.071855] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1176.264401] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832798, 'name': ReconfigVM_Task, 'duration_secs': 0.200481} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.268679] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1176.340972] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.341600] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1176.345799] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.409s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.360265] env[62914]: DEBUG oslo_vmware.api [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832799, 'name': PowerOnVM_Task, 'duration_secs': 0.504696} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.361295] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1176.361576] env[62914]: INFO nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Took 7.69 seconds to spawn the instance on the hypervisor. [ 1176.361836] env[62914]: DEBUG nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1176.363067] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62795de-6f5c-46d1-9967-e90630f254c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.401981] env[62914]: DEBUG nova.network.neutron [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.777672] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1176.777950] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1176.778128] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.778326] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1176.778478] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.778633] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1176.778904] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1176.779115] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1176.779298] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1176.779471] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1176.779651] env[62914]: DEBUG nova.virt.hardware [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1176.785059] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Reconfiguring VM instance instance-0000004a to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1176.786146] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9a8ee6e-46d4-422f-ac9a-cf17441b1237 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.806855] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1176.806855] env[62914]: value = "task-4832800" [ 1176.806855] env[62914]: _type = "Task" [ 1176.806855] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.818269] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832800, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.848334] env[62914]: DEBUG nova.compute.utils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1176.849888] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1176.850125] env[62914]: DEBUG nova.network.neutron [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1176.884244] env[62914]: INFO nova.compute.manager [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Took 20.21 seconds to build instance. 
[ 1176.895626] env[62914]: DEBUG nova.policy [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6739a790d54c98b39ff51cf254379c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd141c01c1d5848eea6ef2b831e431ba5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1176.905282] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1176.935694] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6d331bf1baecfa76cf4532fef2fa51b7',container_format='bare',created_at=2025-11-25T11:32:17Z,direct_url=,disk_format='vmdk',id=0c7a66ae-a3fb-406e-9993-5953aff2f722,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1752465538-shelved',owner='894c73ea90624428afeb1165afbbfa9c',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-11-25T11:32:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1176.936144] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1176.936340] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.936555] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1176.936732] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.936909] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1176.937144] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1176.937312] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1176.937488] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1176.937658] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1176.937842] env[62914]: DEBUG nova.virt.hardware [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1176.938737] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6be974c-7d48-40e0-93d9-1108fda160e3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.951192] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe7a01b-7a7a-4d30-88bd-ff161e1c7aef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.969169] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:83:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '949a1716-cbb0-44a7-a0f6-4d27a45071e0', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1176.976650] env[62914]: DEBUG oslo.service.loopingcall [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1176.979840] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1176.980388] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb0a0dba-479b-46bc-8f04-b64ebdb4a725 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.002332] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.002332] env[62914]: value = "task-4832801" [ 1177.002332] env[62914]: _type = "Task" [ 1177.002332] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.012361] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832801, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.092334] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33ac1ba-7ad9-46aa-adba-76493abcc42e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.101259] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c951443-c743-4941-9a28-09115cecc84e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.137464] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2827a6-b415-44e6-93ea-75b613ae669d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.146473] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd92aa3-2c68-472f-a023-1ece69df00b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.164286] env[62914]: DEBUG nova.compute.provider_tree [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.187224] env[62914]: DEBUG nova.network.neutron [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Successfully created port: bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1177.318895] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832800, 'name': ReconfigVM_Task, 'duration_secs': 0.195357} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.323079] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Reconfigured VM instance instance-0000004a to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1177.323079] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8428c10-e7fa-4bf5-a70c-19cd659b54c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.347602] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf/af141439-1c36-4184-9775-d1e30ee77ddf.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.348234] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cdd9605-65c6-46ce-aecb-167bfc02246b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.368033] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1177.375607] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1177.375607] env[62914]: value = "task-4832802" [ 1177.375607] env[62914]: _type = "Task" [ 1177.375607] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.386567] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832802, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.390278] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f486ddab-70d0-4c26-a245-761a8a940d4b tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.732s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.430248] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.513139] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832801, 'name': CreateVM_Task, 'duration_secs': 0.426644} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.513325] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1177.514072] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.514260] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.514652] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1177.514970] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85a59a8e-bc7c-4151-bfb2-86bb5e17a0ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.520893] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1177.520893] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258f5da-7bc6-1986-7f58-193e52b8bdc9" [ 1177.520893] env[62914]: _type = "Task" [ 1177.520893] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.529464] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5258f5da-7bc6-1986-7f58-193e52b8bdc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.669875] env[62914]: DEBUG nova.scheduler.client.report [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1177.679544] env[62914]: DEBUG nova.compute.manager [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1177.679809] env[62914]: DEBUG nova.compute.manager [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing instance network info cache due to event network-changed-949a1716-cbb0-44a7-a0f6-4d27a45071e0. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1177.680491] env[62914]: DEBUG oslo_concurrency.lockutils [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] Acquiring lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.680491] env[62914]: DEBUG oslo_concurrency.lockutils [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] Acquired lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.680491] env[62914]: DEBUG nova.network.neutron [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Refreshing network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1177.888671] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832802, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.932417] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.933518] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 1177.933518] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.933518] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 1177.962746] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.962996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.032491] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.032776] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Processing image 0c7a66ae-a3fb-406e-9993-5953aff2f722 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.033020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.033254] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 
tempest-ServerActionsTestOtherB-1631848678-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.033374] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.033830] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f1d149c-5dc1-4765-8d81-9f0afadfe038 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.044341] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.044637] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1178.045597] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b78f37a-96a2-4f9d-bd73-e80f3efd1df9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.052182] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1178.052182] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52221d85-e870-e9b4-1ac8-b613759930af" [ 1178.052182] env[62914]: _type = "Task" [ 1178.052182] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.061242] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52221d85-e870-e9b4-1ac8-b613759930af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.383346] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1178.389475] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832802, 'name': ReconfigVM_Task, 'duration_secs': 0.636607} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.389759] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Reconfigured VM instance instance-0000004a to attach disk [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf/af141439-1c36-4184-9775-d1e30ee77ddf.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.390172] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1178.411865] env[62914]: DEBUG nova.network.neutron [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updated VIF entry in instance network info cache for port 949a1716-cbb0-44a7-a0f6-4d27a45071e0. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1178.412173] env[62914]: DEBUG nova.network.neutron [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [{"id": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "address": "fa:16:3e:5a:83:3b", "network": {"id": "291932be-19d4-43a6-92e1-e8716f572f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1340034603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "894c73ea90624428afeb1165afbbfa9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap949a1716-cb", "ovs_interfaceid": "949a1716-cbb0-44a7-a0f6-4d27a45071e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.420753] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1178.421077] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1178.421185] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.421381] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1178.421534] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.421705] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1178.422412] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1178.422652] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1178.422784] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1178.422961] env[62914]: DEBUG nova.virt.hardware [None 
req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1178.423158] env[62914]: DEBUG nova.virt.hardware [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1178.424112] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f21c52-1fea-4f68-93fc-c93d95203e44 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.433148] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c78e61-1669-4152-b35b-49ee1826cbcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.466656] env[62914]: DEBUG nova.compute.utils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1178.563198] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Preparing fetch location {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1178.563495] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Fetch image to [datastore2] OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7/OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7.vmdk {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1178.563675] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Downloading stream optimized image 0c7a66ae-a3fb-406e-9993-5953aff2f722 to [datastore2] OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7/OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7.vmdk on the data store datastore2 as vApp {{(pid=62914) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1178.563901] env[62914]: DEBUG nova.virt.vmwareapi.images [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Downloading image file data 0c7a66ae-a3fb-406e-9993-5953aff2f722 to the ESX as VM named 'OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7' {{(pid=62914) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1178.567304] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.639706] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1178.639706] env[62914]: value = "resgroup-9" [ 1178.639706] env[62914]: _type = "ResourcePool" [ 1178.639706] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1178.640120] env[62914]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cc7ecf48-fc9a-4659-8b49-43029a9238a5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.674573] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease: (returnval){ [ 1178.674573] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10504-f7b5-9497-a50e-be8ca931b278" [ 1178.674573] env[62914]: _type = "HttpNfcLease" [ 1178.674573] env[62914]: } obtained for vApp import into resource pool (val){ [ 1178.674573] env[62914]: value = "resgroup-9" [ 1178.674573] env[62914]: _type = "ResourcePool" [ 1178.674573] env[62914]: }. {{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1178.674920] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the lease: (returnval){ [ 1178.674920] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10504-f7b5-9497-a50e-be8ca931b278" [ 1178.674920] env[62914]: _type = "HttpNfcLease" [ 1178.674920] env[62914]: } to be ready. {{(pid=62914) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1178.683323] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.337s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.686036] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1178.686036] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10504-f7b5-9497-a50e-be8ca931b278" [ 1178.686036] env[62914]: _type = "HttpNfcLease" [ 1178.686036] env[62914]: } is initializing. 
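Editor's note: the "Lease ... is initializing" entry above belongs to the import-vApp flow: the driver asks vCenter for an HttpNfcLease, waits for it to become ready, and only then opens the HTTP write handle for the VMDK upload seen a little further down. The sketch below models that wait in plain Python; read_lease_state is a hypothetical callable and any state beyond those visible in the log is an assumption, not the oslo.vmware interface.

# --- illustrative sketch (not part of the log): waiting for an NFC lease ---
# read_lease_state(lease_ref) is assumed to return "initializing", "ready" or "error",
# mirroring the lease states that appear in the log.
import time


def wait_for_lease_ready(lease_ref, read_lease_state, poll_interval=0.5, timeout=120):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = read_lease_state(lease_ref)
        if state == "ready":
            return        # caller can now fetch the disk-0.vmdk upload URL from lease info
        if state == "error":
            raise RuntimeError(f"lease {lease_ref} entered error state")
        time.sleep(poll_interval)   # the log suggests roughly half a second between polls
    raise TimeoutError(f"lease {lease_ref} never became ready")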
{{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1178.728228] env[62914]: DEBUG nova.network.neutron [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Successfully updated port: bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.898010] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6959bf9a-2800-483f-a456-26f30f407b03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.920118] env[62914]: DEBUG oslo_concurrency.lockutils [req-40e653ac-8933-4dd9-8741-31e697530780 req-051f69eb-e0eb-4812-a205-4ac7513c862a service nova] Releasing lock "refresh_cache-88acf376-122d-4796-8400-dfc4c7ec45d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.921197] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c8629f-0760-4b75-8eb6-cfe88a03e881 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.941328] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1178.969353] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.184900] env[62914]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1179.184900] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10504-f7b5-9497-a50e-be8ca931b278" [ 1179.184900] env[62914]: _type = "HttpNfcLease" [ 1179.184900] env[62914]: } is ready. {{(pid=62914) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1179.185287] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1179.185287] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c10504-f7b5-9497-a50e-be8ca931b278" [ 1179.185287] env[62914]: _type = "HttpNfcLease" [ 1179.185287] env[62914]: }. 
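Editor's note: earlier in this block the nova.virt.hardware entries walk through CPU topology selection for the m1.nano flavor: limits and preferences of 0:0:0 collapse to defaults, the only candidate for 1 vCPU is sockets=1, cores=1, threads=1, and that topology is chosen. The sketch below reproduces that enumeration in simplified form; it ignores image properties and NUMA, and the "prefer more sockets" ordering is one plausible default, so treat it as a model of the logged decision rather than Nova's implementation.

# --- illustrative sketch (not part of the log): enumerating CPU topologies ---
from collections import namedtuple
from itertools import product

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) combination that multiplies to vcpus."""
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            yield VirtCPUTopology(s, c, t)


def desirable_topology(vcpus):
    candidates = list(possible_topologies(vcpus))
    # One plausible ordering: favour sockets over cores over threads. With vcpus=1
    # there is exactly one candidate, matching the logged output.
    candidates.sort(key=lambda t: (-t.sockets, -t.cores, -t.threads))
    return candidates[0]


print(desirable_topology(1))   # VirtCPUTopology(sockets=1, cores=1, threads=1)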
{{(pid=62914) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1179.186072] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9001f3cf-3859-4db9-a777-ea685d06fe2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.199137] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df0ae6-b0c1-fdec-89ec-859dd2016520/disk-0.vmdk from lease info. {{(pid=62914) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1179.199353] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df0ae6-b0c1-fdec-89ec-859dd2016520/disk-0.vmdk. {{(pid=62914) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1179.255691] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.255836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.256065] env[62914]: DEBUG nova.network.neutron [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1179.267773] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cad382f7-ff57-463a-9a22-04385bdc06d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.299808] env[62914]: INFO nova.scheduler.client.report [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocation for migration 2877ce41-ea0e-4f50-ba01-f1780ded0468 [ 1179.494442] env[62914]: DEBUG nova.network.neutron [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Port 5d5caccf-1912-40af-a849-900df4764c6f binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1179.562797] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None 
None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.567460] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.567701] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.696241] env[62914]: DEBUG nova.compute.manager [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Received event network-changed-38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1179.696469] env[62914]: DEBUG nova.compute.manager [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Refreshing instance network info cache due to event network-changed-38dca680-426a-4e56-834d-e95b4f9d439f. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1179.696791] env[62914]: DEBUG oslo_concurrency.lockutils [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] Acquiring lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.696966] env[62914]: DEBUG oslo_concurrency.lockutils [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] Acquired lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.697169] env[62914]: DEBUG nova.network.neutron [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Refreshing network info cache for port 38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1179.795760] env[62914]: DEBUG nova.network.neutron [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Instance cache missing network info. 
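Editor's note: the "Running periodic task ComputeManager._check_instance_build_time / _poll_volume_usage / update_available_resource" entries above come from oslo.service's periodic-task machinery, which walks the registered callables on each tick and lets a task bail out early, as _reclaim_queued_deletes did earlier with "CONF.reclaim_instance_interval <= 0, skipping...". A stripped-down model of that dispatcher, with hypothetical task bodies, follows.

# --- illustrative sketch (not part of the log): a minimal periodic-task runner ---
import time

RECLAIM_INSTANCE_INTERVAL = 0   # stands in for CONF.reclaim_instance_interval


def check_instance_build_time():
    print("checking instance build times")          # placeholder body


def reclaim_queued_deletes():
    if RECLAIM_INSTANCE_INTERVAL <= 0:
        print("reclaim_instance_interval <= 0, skipping...")   # matches the logged skip
        return
    print("reclaiming queued deletes")


PERIODIC_TASKS = [check_instance_build_time, reclaim_queued_deletes]


def run_periodic_tasks(tasks, spacing=60.0, ticks=1):
    """Run every registered task once per tick, spacing seconds apart."""
    for tick in range(ticks):
        for task in tasks:
            print(f"Running periodic task {task.__name__}")
            task()
        if tick + 1 < ticks:
            time.sleep(spacing)


run_periodic_tasks(PERIODIC_TASKS)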
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1179.805784] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.226s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.930657] env[62914]: DEBUG nova.compute.manager [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Received event network-vif-plugged-bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1179.931255] env[62914]: DEBUG oslo_concurrency.lockutils [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] Acquiring lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.931560] env[62914]: DEBUG oslo_concurrency.lockutils [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.931679] env[62914]: DEBUG oslo_concurrency.lockutils [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.931819] env[62914]: DEBUG nova.compute.manager [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] No waiting events found dispatching network-vif-plugged-bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1179.932012] env[62914]: WARNING nova.compute.manager [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Received unexpected event network-vif-plugged-bca6528d-bcd2-409f-b91d-8d3ceb00d244 for instance with vm_state building and task_state spawning. [ 1179.932211] env[62914]: DEBUG nova.compute.manager [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Received event network-changed-bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1179.932490] env[62914]: DEBUG nova.compute.manager [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Refreshing instance network info cache due to event network-changed-bca6528d-bcd2-409f-b91d-8d3ceb00d244. 
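Editor's note: the network-vif-plugged and network-changed entries around here show how neutron-originated events reach the compute manager. Each event is matched, under the per-instance "-events" lock, against any waiter registered for it; when nothing is waiting (the instance is still building), the manager logs the "Received unexpected event ..." warning and simply refreshes the network info cache. The dispatcher below is a simplified model of that matching; the data structures and function names are assumptions for illustration.

# --- illustrative sketch (not part of the log): dispatching instance events ---
import threading
from collections import defaultdict

# Maps instance UUID -> {event name -> threading.Event waited on by the spawn path}.
_waiters: dict[str, dict[str, threading.Event]] = defaultdict(dict)
_lock = threading.Lock()    # plays the role of the "<uuid>-events" lock in the log


def prepare_for_event(instance_uuid: str, event_name: str) -> threading.Event:
    """Called by the code path that expects the event (e.g. while plugging a VIF)."""
    ev = threading.Event()
    with _lock:
        _waiters[instance_uuid][event_name] = ev
    return ev


def external_instance_event(instance_uuid: str, event_name: str) -> None:
    """Called when another service (here, neutron) reports an event for an instance."""
    with _lock:
        waiter = _waiters[instance_uuid].pop(event_name, None)
    if waiter is None:
        # Matches: "WARNING ... Received unexpected event ... for instance ..."
        print(f"Received unexpected event {event_name} for instance {instance_uuid}")
        return
    waiter.set()    # wakes whoever called prepare_for_event(...).wait()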
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1179.932656] env[62914]: DEBUG oslo_concurrency.lockutils [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] Acquiring lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.977925] env[62914]: DEBUG nova.network.neutron [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Updating instance_info_cache with network_info: [{"id": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "address": "fa:16:3e:07:e4:c8", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca6528d-bc", "ovs_interfaceid": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.051883] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.052197] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.052479] env[62914]: INFO nova.compute.manager [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Attaching volume 4ff3c664-12a4-426a-aa86-04f6fb6e9e4a to /dev/sdb [ 1180.071014] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.071258] env[62914]: DEBUG 
oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.071435] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.071588] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1180.072641] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be3e829-50a6-4c5e-aeda-41d8030f3972 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.083080] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc2eb77-31a3-425a-b49c-2275c52ec44b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.091492] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b86429e-877e-42fb-a221-94b0cc9d7084 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.106963] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ff52c6-2743-4d06-a322-a2ae82a3601e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.113160] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3939d88-55bf-4a01-ac9d-e558c052b879 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.120430] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98eade5d-c201-44d5-88c7-05e2d59d191b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.152150] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178337MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1180.152353] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.152504] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.163297] env[62914]: DEBUG nova.virt.block_device [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating existing volume attachment record: 19639c48-5b5a-4e82-883b-1e7847c7e0e1 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1180.462490] env[62914]: DEBUG nova.network.neutron [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Updated VIF entry in instance network info cache for port 38dca680-426a-4e56-834d-e95b4f9d439f. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1180.462887] env[62914]: DEBUG nova.network.neutron [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Updating instance_info_cache with network_info: [{"id": "38dca680-426a-4e56-834d-e95b4f9d439f", "address": "fa:16:3e:f0:f7:5b", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38dca680-42", "ovs_interfaceid": "38dca680-426a-4e56-834d-e95b4f9d439f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.480860] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.481293] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Instance network_info: |[{"id": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "address": "fa:16:3e:07:e4:c8", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca6528d-bc", "ovs_interfaceid": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1180.481999] env[62914]: DEBUG oslo_concurrency.lockutils [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] Acquired lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.482287] env[62914]: DEBUG nova.network.neutron [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Refreshing network info cache for port bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1180.483569] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:e4:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bca6528d-bcd2-409f-b91d-8d3ceb00d244', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.491280] env[62914]: DEBUG oslo.service.loopingcall [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.498176] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1180.499364] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f139da9-205e-40ef-88d4-a927daeb8696 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.540162] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.540586] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.540870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.542433] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.542433] env[62914]: value = "task-4832807" [ 1180.542433] env[62914]: _type = "Task" [ 1180.542433] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.547458] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Completed reading data from the image iterator. {{(pid=62914) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1180.547698] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df0ae6-b0c1-fdec-89ec-859dd2016520/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1180.549211] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771377b0-2a59-4c5a-ac88-82366a643728 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.561788] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832807, 'name': CreateVM_Task} progress is 6%. 
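Editor's note: the Acquiring/acquired/released triplets that run through this whole block (the "<uuid>-events", "compute_resources" and image-cache locks) are emitted by oslo.concurrency's lock wrapper, which times how long a caller waited for and then held a named lock. The sketch below is a stripped-down re-implementation of that timing pattern using only the standard library, so the waited/held figures line up with the log format; it is not the oslo.concurrency code.

# --- illustrative sketch (not part of the log): timing a named lock ---
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)


@contextmanager
def timed_lock(name: str, caller: str):
    lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


with timed_lock("compute_resources", "ResourceTracker._update_available_resource"):
    time.sleep(0.01)    # stand-in for the audited critical section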
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.563520] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df0ae6-b0c1-fdec-89ec-859dd2016520/disk-0.vmdk is in state: ready. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1180.563701] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df0ae6-b0c1-fdec-89ec-859dd2016520/disk-0.vmdk. {{(pid=62914) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1180.563944] env[62914]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9fd4f01a-0141-4225-a7fd-b9d1608acaf4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.756290] env[62914]: DEBUG nova.network.neutron [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Updated VIF entry in instance network info cache for port bca6528d-bcd2-409f-b91d-8d3ceb00d244. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1180.756778] env[62914]: DEBUG nova.network.neutron [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Updating instance_info_cache with network_info: [{"id": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "address": "fa:16:3e:07:e4:c8", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca6528d-bc", "ovs_interfaceid": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.791040] env[62914]: DEBUG oslo_vmware.rw_handles [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df0ae6-b0c1-fdec-89ec-859dd2016520/disk-0.vmdk. 
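Editor's note: a few entries back, nova.compute.utils logged "Using /dev/sd instead of None" while reserving a block device name, and the attach then targeted /dev/sdb because the root disk already occupies /dev/sda. The sketch below shows that "next free device name" idea in isolation; the helper name and the plain a-z suffix walk are illustrative assumptions, not Nova's full implementation, which also handles prefixes, config drive and existing mappings.

# --- illustrative sketch (not part of the log): picking the next /dev/sdX name ---
import string


def get_next_device_name(used_names, prefix="/dev/sd"):
    """Return the first /dev/sd<letter> not present in used_names."""
    used = {name for name in used_names if name}
    for letter in string.ascii_lowercase:
        candidate = f"{prefix}{letter}"
        if candidate not in used:
            return candidate
    raise ValueError("no free device names left")


# Root disk is on /dev/sda, so the next volume lands on /dev/sdb as in the log.
print(get_next_device_name({"/dev/sda"}))   # /dev/sdb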
{{(pid=62914) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1180.791215] env[62914]: INFO nova.virt.vmwareapi.images [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Downloaded image file data 0c7a66ae-a3fb-406e-9993-5953aff2f722 [ 1180.792135] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ce7428-54a8-46dd-b74a-a375712f46e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.812952] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-425f3be7-0c3a-475a-bdc6-9ba719781fb7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.845229] env[62914]: INFO nova.virt.vmwareapi.images [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] The imported VM was unregistered [ 1180.847808] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Caching image {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1180.848244] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Creating directory with path [datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722 {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1180.848472] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14164fc0-7411-4f4f-8163-32b41327a4b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.861703] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Created directory with path [datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722 {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1180.861956] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7/OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7.vmdk to [datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk. 
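Editor's note: the Caching image and MoveVirtualDisk entries above are the tail of the fetch-image-if-missing flow: the stream-optimized image is imported as a temporary OSTACK_IMG_* VM, unregistered, and its disk is moved under devstack-image-cache_base/<image-id>/ so later boots of the same image skip the download. The sketch below models the cache-key logic behind that layout with paths treated as plain strings; function names and the in-memory cache set are illustrative assumptions, not the ds_util/vmops code.

# --- illustrative sketch (not part of the log): datastore image-cache layout ---
import posixpath


def cached_image_path(datastore, cache_folder, image_id):
    """Return '[datastore] cache_folder/<image_id>/<image_id>.vmdk'."""
    rel = posixpath.join(cache_folder, image_id, f"{image_id}.vmdk")
    return f"[{datastore}] {rel}"


def fetch_image_if_missing(datastore, cache_folder, image_id, existing_paths, fetch):
    """Only call fetch() (the expensive vApp import) when the cache entry is absent."""
    target = cached_image_path(datastore, cache_folder, image_id)
    if target in existing_paths:
        return target                      # cache hit: reuse the cached .vmdk directly
    fetch(target)                          # import, unregister, move into place
    existing_paths.add(target)
    return target


print(cached_image_path("datastore2", "devstack-image-cache_base",
                        "0c7a66ae-a3fb-406e-9993-5953aff2f722"))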
{{(pid=62914) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1180.862283] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d7fbe68f-8ea4-4540-a325-f9ff32d3d03e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.870774] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1180.870774] env[62914]: value = "task-4832809" [ 1180.870774] env[62914]: _type = "Task" [ 1180.870774] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.881568] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.967337] env[62914]: DEBUG oslo_concurrency.lockutils [req-4d3676d0-e893-4aa9-92a9-6b1410548830 req-6255a36d-e683-42da-8b83-0681fb968b97 service nova] Releasing lock "refresh_cache-cad97698-d68d-42de-a4de-772917e60374" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.058676] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832807, 'name': CreateVM_Task, 'duration_secs': 0.450673} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.059522] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1181.067222] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.067466] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.067876] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1181.068383] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbb58d22-4af7-453e-91e9-8dbec23482d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.074028] env[62914]: DEBUG oslo_vmware.api [None 
req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1181.074028] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a9723-c072-4c3f-a991-05425eee0725" [ 1181.074028] env[62914]: _type = "Task" [ 1181.074028] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.082720] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a9723-c072-4c3f-a991-05425eee0725, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.084308] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.084546] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.085210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "9673614c-44c9-4348-b528-0bd28c892a11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.085210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.085332] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.087648] env[62914]: INFO nova.compute.manager [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Terminating instance [ 1181.089529] env[62914]: DEBUG nova.compute.manager [None 
req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1181.089731] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1181.090601] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e591592-40fb-489c-93e6-3d9d2f088aac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.098726] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1181.099040] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddbce3ad-960f-420d-914c-2904cfad6fa2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.107412] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1181.107412] env[62914]: value = "task-4832810" [ 1181.107412] env[62914]: _type = "Task" [ 1181.107412] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.117692] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832810, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.174789] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Applying migration context for instance af141439-1c36-4184-9775-d1e30ee77ddf as it has an incoming, in-progress migration 7cda217d-33cb-4559-bebb-00862b606af8. Migration status is post-migrating {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1181.176300] env[62914]: INFO nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating resource usage from migration 7cda217d-33cb-4559-bebb-00862b606af8 [ 1181.197926] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.198250] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.198398] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.198517] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.198633] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 74e7896c-8a1f-448d-a44b-e6febfff9000 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.198747] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance e730b472-fca8-4041-a00c-91bee25232f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.198913] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 9673614c-44c9-4348-b528-0bd28c892a11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.199096] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 88acf376-122d-4796-8400-dfc4c7ec45d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.199241] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance cad97698-d68d-42de-a4de-772917e60374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.199368] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Migration 7cda217d-33cb-4559-bebb-00862b606af8 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 1181.199483] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance af141439-1c36-4184-9775-d1e30ee77ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.199689] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4b76e4eb-5d56-4eb0-82fc-47661dbc7239 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1181.199954] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1181.200159] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2944MB phys_disk=100GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '11', 'num_vm_active': '8', 'num_task_None': '8', 'num_os_type_None': '11', 'num_proj_d141c01c1d5848eea6ef2b831e431ba5': '2', 'io_workload': '1', 'num_task_resize_migrated': '1', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '1', 'num_proj_d77829ac81cd41f2a4acdd571295ca6d': '3', 'num_vm_rescued': '1', 'num_proj_adf406f1352240aba2338e64b8f182b4': '1', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '2', 'num_proj_894c73ea90624428afeb1165afbbfa9c': '1', 'num_proj_1780142384594b1dabc6811b54144d56': '1', 'num_proj_4860bec4a28e4289b7a508f007fff452': '1', 'num_proj_b59bf6daf8c246f7b034dc0adcfc8cde': '1', 'num_vm_building': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1181.262200] env[62914]: DEBUG oslo_concurrency.lockutils [req-596dfeca-7bd9-4f28-96f5-37e64c132cf6 req-152dd3e5-e218-4bc4-bfc1-890de3164089 service nova] Releasing lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.377052] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdc400e-5bad-4ece-b4f8-23acb01cfb87 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.385846] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.388725] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5713b2e-4de8-4032-ade8-feaf16d6a028 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.421996] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8f8b1f-483e-4b42-9c0f-57764ed1500c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.430319] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46d249d-80a8-45b0-b5d5-88342d8c7332 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.444982] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.585761] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.586015] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.586247] env[62914]: DEBUG nova.network.neutron [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1181.592404] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]522a9723-c072-4c3f-a991-05425eee0725, 'name': SearchDatastore_Task, 'duration_secs': 0.032084} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.592404] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.592404] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.592698] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.592743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.592904] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1181.593197] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4632845-1c82-44eb-abc2-e9c6d448b7e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.617802] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832810, 'name': PowerOffVM_Task, 'duration_secs': 0.380917} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.619379] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1181.619580] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1181.619877] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1181.620083] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1181.620876] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8260869-0ee9-46f9-b686-96aa64031f79 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.622722] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9542b35-d609-4a36-885f-c7afb0510f91 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.629199] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1181.629199] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ce1a35-a289-65fb-d962-55a29b57f3a6" [ 1181.629199] env[62914]: _type = "Task" [ 1181.629199] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.638957] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ce1a35-a289-65fb-d962-55a29b57f3a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.714982] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1181.715342] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1181.715549] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleting the datastore file [datastore1] 9673614c-44c9-4348-b528-0bd28c892a11 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1181.715921] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35717c18-3b55-43a6-bcd4-1b80f8470c3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.725225] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for the task: (returnval){ [ 1181.725225] env[62914]: value = "task-4832812" [ 1181.725225] env[62914]: _type = "Task" [ 1181.725225] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.736504] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832812, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.885281] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.948592] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1182.145931] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ce1a35-a289-65fb-d962-55a29b57f3a6, 'name': SearchDatastore_Task, 'duration_secs': 0.06725} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.147129] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b66af25-d047-4f80-bf80-4aba813a65c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.154493] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1182.154493] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525c09b0-d2c0-56bd-06ab-90f8fef10846" [ 1182.154493] env[62914]: _type = "Task" [ 1182.154493] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.168131] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525c09b0-d2c0-56bd-06ab-90f8fef10846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.240014] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832812, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.377934] env[62914]: DEBUG nova.network.neutron [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.387828] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.453918] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1182.454190] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.302s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.670670] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525c09b0-d2c0-56bd-06ab-90f8fef10846, 'name': SearchDatastore_Task, 'duration_secs': 0.100417} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.671158] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.671564] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4b76e4eb-5d56-4eb0-82fc-47661dbc7239/4b76e4eb-5d56-4eb0-82fc-47661dbc7239.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1182.671978] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ad3a978-96b4-4036-ae7f-73e602ca7f16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.685169] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1182.685169] env[62914]: value = "task-4832814" [ 1182.685169] env[62914]: _type = "Task" [ 1182.685169] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.697358] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.740413] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832812, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.881599] env[62914]: DEBUG oslo_concurrency.lockutils [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.889931] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.198377] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.239505] env[62914]: DEBUG oslo_vmware.api [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Task: {'id': task-4832812, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.118107} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.239864] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1183.240104] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1183.240328] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1183.240570] env[62914]: INFO nova.compute.manager [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1183.240845] env[62914]: DEBUG oslo.service.loopingcall [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1183.241130] env[62914]: DEBUG nova.compute.manager [-] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1183.241244] env[62914]: DEBUG nova.network.neutron [-] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1183.387946] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.419901] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2a002e-c6ae-4878-87ae-fd5817108a23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.442151] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997f9b8c-2941-45bc-9384-e1a4c7a1dbf0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.452258] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1183.459272] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.460093] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.608933] env[62914]: DEBUG nova.compute.manager [req-21615324-8e85-4d2a-a5fd-e38cfe8f549c req-8bed3b48-9752-424d-9741-5fd41f3bee7b service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Received event network-vif-deleted-2c6def1a-051c-4671-bee1-4eeefcd24ae3 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1183.609416] env[62914]: INFO nova.compute.manager [req-21615324-8e85-4d2a-a5fd-e38cfe8f549c req-8bed3b48-9752-424d-9741-5fd41f3bee7b service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Neutron deleted interface 2c6def1a-051c-4671-bee1-4eeefcd24ae3; detaching it from the instance and deleting it from the info cache [ 1183.609655] env[62914]: DEBUG nova.network.neutron [req-21615324-8e85-4d2a-a5fd-e38cfe8f549c req-8bed3b48-9752-424d-9741-5fd41f3bee7b service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.700038] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.885462] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.962861] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1183.963232] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c90bab8-ab86-4efb-bd85-37bf2a4337e6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.971894] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1183.971894] env[62914]: value = "task-4832815" [ 1183.971894] env[62914]: _type = "Task" [ 1183.971894] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.981464] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832815, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.082197] env[62914]: DEBUG nova.network.neutron [-] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.113420] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbd753de-6a16-421d-baae-4e1c6c2a2d40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.128533] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf610e1-3985-4877-a38b-a8bc2499c59a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.164954] env[62914]: DEBUG nova.compute.manager [req-21615324-8e85-4d2a-a5fd-e38cfe8f549c req-8bed3b48-9752-424d-9741-5fd41f3bee7b service nova] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Detach interface failed, port_id=2c6def1a-051c-4671-bee1-4eeefcd24ae3, reason: Instance 9673614c-44c9-4348-b528-0bd28c892a11 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1184.199017] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832814, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.388152] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832809, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.234993} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.388601] env[62914]: INFO nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7/OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7.vmdk to [datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk. [ 1184.388904] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Cleaning up location [datastore2] OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7 {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1184.389192] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_d009f537-7a15-42a0-855a-c13411d7ccf7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.389552] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e68d4e54-682e-4b31-8518-28702cab790c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.399727] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1184.399727] env[62914]: value = "task-4832816" [ 1184.399727] env[62914]: _type = "Task" [ 1184.399727] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.409714] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.482078] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832815, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.585158] env[62914]: INFO nova.compute.manager [-] [instance: 9673614c-44c9-4348-b528-0bd28c892a11] Took 1.34 seconds to deallocate network for instance. [ 1184.699857] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832814, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.824395} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.700136] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] 4b76e4eb-5d56-4eb0-82fc-47661dbc7239/4b76e4eb-5d56-4eb0-82fc-47661dbc7239.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1184.700372] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1184.700656] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f6470c0-5d80-4a21-8ba4-448715f0b566 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.708416] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1184.708416] env[62914]: value = "task-4832817" [ 1184.708416] env[62914]: _type = "Task" [ 1184.708416] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.712813] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Volume attach. Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1184.713074] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942097', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'name': 'volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74e7896c-8a1f-448d-a44b-e6febfff9000', 'attached_at': '', 'detached_at': '', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'serial': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1184.713943] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8ca514-f72b-47b7-aac9-785421f0b4c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.723120] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832817, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.734556] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5669b7c7-b43b-4c9c-a751-a0e1b604998c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.764668] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a/volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.765484] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a2cad03-e496-4308-a41c-354af22b2209 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.784768] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1184.784768] env[62914]: value = "task-4832818" [ 1184.784768] env[62914]: _type = "Task" [ 1184.784768] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.794546] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832818, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.910916] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127611} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.911229] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1184.911410] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.911672] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk to [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1184.911972] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b529c245-d728-42c2-aef2-557322a1b9e0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.920393] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1184.920393] env[62914]: value = "task-4832819" [ 1184.920393] env[62914]: _type = "Task" [ 1184.920393] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.929161] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.981911] env[62914]: DEBUG oslo_vmware.api [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832815, 'name': PowerOnVM_Task, 'duration_secs': 0.791162} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.982213] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1184.982415] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-787a8578-a093-446e-94d1-3e9dbd691771 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance 'af141439-1c36-4184-9775-d1e30ee77ddf' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1185.094156] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.094570] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.094927] env[62914]: DEBUG nova.objects.instance [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lazy-loading 'resources' on Instance uuid 9673614c-44c9-4348-b528-0bd28c892a11 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.219764] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832817, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.238199} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.220099] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1185.220941] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f7ed3b-9cab-47ab-9474-ff914a89c291 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.245327] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 4b76e4eb-5d56-4eb0-82fc-47661dbc7239/4b76e4eb-5d56-4eb0-82fc-47661dbc7239.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1185.245630] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92fd3f95-c6c0-41d6-8bcf-9945de0b1646 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.266946] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1185.266946] env[62914]: value = "task-4832820" [ 1185.266946] env[62914]: _type = "Task" [ 1185.266946] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.276033] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832820, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.295640] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832818, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.431172] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.778326] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.780253] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3acd0bf-73a6-48d0-a9c5-d37dca7bb364 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.794250] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407bce21-2804-45df-a6e8-a9a04e408324 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.803376] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832818, 'name': ReconfigVM_Task, 'duration_secs': 0.769198} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.828599] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a/volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.833778] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b59e274a-3dde-4fee-9c78-345b9fc4037c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.845401] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f194145-1d3b-49ee-96c6-8915358c99b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.854486] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0846d617-f1be-4c26-9ce2-7bd8863e96a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.860213] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1185.860213] env[62914]: value = "task-4832821" [ 1185.860213] env[62914]: _type = "Task" [ 1185.860213] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.872359] env[62914]: DEBUG nova.compute.provider_tree [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.882186] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832821, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.937428] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.279571] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.374077] env[62914]: DEBUG oslo_vmware.api [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832821, 'name': ReconfigVM_Task, 'duration_secs': 0.175217} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.374469] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942097', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'name': 'volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74e7896c-8a1f-448d-a44b-e6febfff9000', 'attached_at': '', 'detached_at': '', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'serial': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1186.376652] env[62914]: DEBUG nova.scheduler.client.report [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1186.438350] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.782893] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832820, 'name': ReconfigVM_Task} progress is 14%. 
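The resource-tracker/report-client entries in this section log the compute node's Placement inventory verbatim. As a quick aid for reading those numbers (not Nova code): the effective schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit bounding any single allocation. A minimal standalone Python sketch reusing the values logged above; the helper name is ours:

import pprint

# Inventory dict copied from the "Inventory has not changed for provider ..."
# records in this section (provider f2f7a014-852b-4b37-9610-c5761f4b0175).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1, 'max_unit': 95,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def schedulable_capacity(inv):
    # Placement-style effective capacity: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

pprint.pprint(schedulable_capacity(inventory))
# {'DISK_GB': 200.0, 'MEMORY_MB': 196078.0, 'VCPU': 192.0}

With allocation_ratio 4.0, the 48 host VCPUs advertise 192 schedulable VCPUs, which is the figure the scheduler works against in the reports above.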
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.883878] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.919135] env[62914]: INFO nova.scheduler.client.report [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Deleted allocations for instance 9673614c-44c9-4348-b528-0bd28c892a11 [ 1186.937640] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.012869] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.013293] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.013517] env[62914]: DEBUG nova.compute.manager [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Going to confirm migration 8 {{(pid=62914) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1187.282162] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832820, 'name': ReconfigVM_Task, 'duration_secs': 1.699042} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.282551] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 4b76e4eb-5d56-4eb0-82fc-47661dbc7239/4b76e4eb-5d56-4eb0-82fc-47661dbc7239.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.283256] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13515526-0830-479b-b909-7b1c579469ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.296321] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1187.296321] env[62914]: value = "task-4832822" [ 1187.296321] env[62914]: _type = "Task" [ 1187.296321] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.307593] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832822, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.437579] env[62914]: DEBUG nova.objects.instance [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.439142] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task} progress is 88%. 
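The recurring 'Waiting for the task: (returnval){ value = "task-..." }', 'progress is N%' and 'completed successfully' entries are emitted by the oslo.vmware wait_for_task/_poll_task helpers referenced in the log paths. Purely as an illustration of that shape (a sketch, not the library's implementation; poll_fn and its state dict are hypothetical):

import time

def wait_for_task(poll_fn, task_id, interval=0.5, timeout=300):
    # Poll until the task reports success or error, logging progress in between,
    # which is the behaviour the "_poll_task ... progress is N%" lines reflect.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn(task_id)          # hypothetical: {'state': ..., 'name': ..., 'progress': ...}
        if info['state'] == 'success':
            return info                  # corresponds to "... completed successfully."
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} ({info.get('name')}) progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")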
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.439660] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bfabbd33-5e5d-465b-9062-95b09fda4f2b tempest-DeleteServersTestJSON-844183300 tempest-DeleteServersTestJSON-844183300-project-member] Lock "9673614c-44c9-4348-b528-0bd28c892a11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.355s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.593235] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.593436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.593628] env[62914]: DEBUG nova.network.neutron [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1187.593825] env[62914]: DEBUG nova.objects.instance [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'info_cache' on Instance uuid af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.813350] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832822, 'name': Rename_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.934669] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832819, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.811251} completed successfully. 
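The 'Acquiring lock ... by ...', 'acquired ... waited' and 'released ... held' lines come from oslo.concurrency's lockutils wrappers around compute-manager operations (terminate, stop, resize-confirm, cache refresh). A minimal standalone sketch of that pattern, assuming only the public oslo.concurrency API; the function bodies and names below are placeholders, not Nova's methods:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # While this runs, other callers serialize on the same named lock; the
    # "waited N.NNNs" / "held N.NNNs" figures in the log are that accounting.
    pass

def do_stop_instance(instance_uuid):
    # Per-instance serialization, mirroring the locks named after instance UUIDs.
    with lockutils.lock(instance_uuid):
        pass

update_usage()
do_stop_instance('74e7896c-8a1f-448d-a44b-e6febfff9000')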
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.934975] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/0c7a66ae-a3fb-406e-9993-5953aff2f722/0c7a66ae-a3fb-406e-9993-5953aff2f722.vmdk to [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1187.936279] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa1a6ed-e111-4c93-921b-f402144491d5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.965613] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1187.966581] env[62914]: DEBUG oslo_concurrency.lockutils [None req-5ffb80ec-6a30-4532-b20e-0d541916e564 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.914s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.967897] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb3d31fc-8c7a-43a5-85ac-567c2e59c9f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.991786] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1187.991786] env[62914]: value = "task-4832824" [ 1187.991786] env[62914]: _type = "Task" [ 1187.991786] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.001833] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832824, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.307182] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832822, 'name': Rename_Task, 'duration_secs': 0.544083} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.307602] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1188.307757] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97bf02e3-6eac-4919-8a06-2948f26f0342 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.314551] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1188.314551] env[62914]: value = "task-4832825" [ 1188.314551] env[62914]: _type = "Task" [ 1188.314551] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.323333] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832825, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.501563] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.501831] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.502077] env[62914]: DEBUG nova.compute.manager [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1188.502417] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832824, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.503216] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79e1f66-8a82-4d1e-b1bf-1cfded3bb1fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.510135] env[62914]: DEBUG nova.compute.manager [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1188.510789] env[62914]: DEBUG nova.objects.instance [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.820995] env[62914]: DEBUG nova.network.neutron [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [{"id": "5d5caccf-1912-40af-a849-900df4764c6f", "address": "fa:16:3e:9e:81:d9", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5caccf-19", "ovs_interfaceid": "5d5caccf-1912-40af-a849-900df4764c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.830015] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832825, 'name': PowerOnVM_Task} progress is 66%. 
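The instance_info_cache update logged above carries the full VIF model for port 5d5caccf-1912-40af-a849-900df4764c6f. An abridged copy of that structure with a small helper of our own (not Nova code) shows where the device name, MAC, fixed IP and floating IP sit:

# Trimmed from the network_info dump above; only the fields used below are kept.
vif = {
    "id": "5d5caccf-1912-40af-a849-900df4764c6f",
    "address": "fa:16:3e:9e:81:d9",
    "devname": "tap5d5caccf-19",
    "type": "ovs",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.13",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.158", "type": "floating"}],
            }],
        }],
    },
}

def summarize_vif(vif):
    # Collect fixed addresses and any floating IPs hanging off them.
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    return vif["devname"], vif["address"], fixed, floating

print(summarize_vif(vif))
# ('tap5d5caccf-19', 'fa:16:3e:9e:81:d9', ['192.168.128.13'], ['10.180.180.158'])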
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.938648] env[62914]: DEBUG oslo_concurrency.lockutils [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.938958] env[62914]: DEBUG oslo_concurrency.lockutils [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.002559] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.016095] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1189.016350] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88c38465-fc98-4914-ba87-41c99610380e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.023635] env[62914]: DEBUG oslo_vmware.api [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1189.023635] env[62914]: value = "task-4832826" [ 1189.023635] env[62914]: _type = "Task" [ 1189.023635] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.035367] env[62914]: DEBUG oslo_vmware.api [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832826, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.327738] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832825, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.331567] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-af141439-1c36-4184-9775-d1e30ee77ddf" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.331800] env[62914]: DEBUG nova.objects.instance [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'migration_context' on Instance uuid af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.443030] env[62914]: INFO nova.compute.manager [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Detaching volume 548d77fc-3693-4e6f-8097-f2402ca0f874 [ 1189.481328] env[62914]: INFO nova.virt.block_device [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Attempting to driver detach volume 548d77fc-3693-4e6f-8097-f2402ca0f874 from mountpoint /dev/sdb [ 1189.481569] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Volume detach. 
Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1189.481763] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942083', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'name': 'volume-548d77fc-3693-4e6f-8097-f2402ca0f874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a', 'attached_at': '', 'detached_at': '', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'serial': '548d77fc-3693-4e6f-8097-f2402ca0f874'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1189.482765] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdca9ca6-f70a-4018-8fea-f18faf51b1fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.508844] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65039f7-a825-42d1-96cf-08a5c3e27635 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.518010] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832824, 'name': ReconfigVM_Task, 'duration_secs': 1.352309} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.518867] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9766bf-9dda-47a5-9dae-592cb401933f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.521110] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7/88acf376-122d-4796-8400-dfc4c7ec45d7.vmdk or device None with type streamOptimized {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.522161] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf224351-8717-49d9-9c30-6053bd049b40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.544939] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1189.544939] env[62914]: value = "task-4832827" [ 1189.544939] env[62914]: _type = "Task" [ 1189.544939] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.546401] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ea29b2-75cb-4b1b-97b7-2f4436372255 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.555073] env[62914]: DEBUG oslo_vmware.api [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832826, 'name': PowerOffVM_Task, 'duration_secs': 0.208911} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.555832] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1189.556120] env[62914]: DEBUG nova.compute.manager [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1189.557324] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7505b298-582b-4c5a-8d1f-a8f3c25d766a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.574772] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832827, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.574868] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] The volume has not been displaced from its original location: [datastore1] volume-548d77fc-3693-4e6f-8097-f2402ca0f874/volume-548d77fc-3693-4e6f-8097-f2402ca0f874.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1189.580145] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1189.580900] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32425d88-567b-4b1d-8c75-1805ac5510f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.603428] env[62914]: DEBUG oslo_vmware.api [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1189.603428] env[62914]: value = "task-4832828" [ 1189.603428] env[62914]: _type = "Task" [ 1189.603428] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.615299] env[62914]: DEBUG oslo_vmware.api [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832828, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.828063] env[62914]: DEBUG oslo_vmware.api [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832825, 'name': PowerOnVM_Task, 'duration_secs': 1.152078} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.828306] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1189.828462] env[62914]: INFO nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Took 11.44 seconds to spawn the instance on the hypervisor. 
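The 11.44-second spawn reported just above can be cross-checked against the per-task durations logged earlier in this section for the same instance (ReconfigVM_Task, Rename_Task and PowerOnVM_Task for 4b76e4eb-5d56-4eb0-82fc-47661dbc7239). A small log-mining sketch, reusing those completed-task records verbatim; the regex and helper are ours:

import re

records = [
    "Task: {'id': task-4832820, 'name': ReconfigVM_Task, 'duration_secs': 1.699042} completed successfully.",
    "Task: {'id': task-4832822, 'name': Rename_Task, 'duration_secs': 0.544083} completed successfully.",
    "Task: {'id': task-4832825, 'name': PowerOnVM_Task, 'duration_secs': 1.152078} completed successfully.",
]

pattern = re.compile(r"'name': (\w+).*?'duration_secs': ([\d.]+)")
durations = {m.group(1): float(m.group(2))
             for line in records if (m := pattern.search(line))}
print(durations, sum(durations.values()))
# {'ReconfigVM_Task': 1.699042, 'Rename_Task': 0.544083, 'PowerOnVM_Task': 1.152078} 3.395203

So roughly 3.4 s of the 11.44 s spawn is accounted for by these three vCenter tasks.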
[ 1189.828661] env[62914]: DEBUG nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1189.829521] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78d9ae2-5928-4776-9062-6cfdaa55caa7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.834378] env[62914]: DEBUG nova.objects.base [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1189.836570] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5feb26e-3a45-4f3a-9370-66761a7fb698 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.857530] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8427ab16-9b76-4683-ba6f-7333c4169a7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.863271] env[62914]: DEBUG oslo_vmware.api [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1189.863271] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5242a38b-5681-8d98-610d-7d7c511770fc" [ 1189.863271] env[62914]: _type = "Task" [ 1189.863271] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.871372] env[62914]: DEBUG oslo_vmware.api [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5242a38b-5681-8d98-610d-7d7c511770fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.056026] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832827, 'name': Rename_Task, 'duration_secs': 0.150329} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.056268] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1190.056508] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d007ded3-06f1-4f6e-9cf7-c33b6a238046 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.063264] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1190.063264] env[62914]: value = "task-4832829" [ 1190.063264] env[62914]: _type = "Task" [ 1190.063264] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.071863] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.102688] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e2e24728-461c-4354-b4ac-842b7bfc4630 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.601s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.113559] env[62914]: DEBUG oslo_vmware.api [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832828, 'name': ReconfigVM_Task, 'duration_secs': 0.333392} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.113821] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1190.118995] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62a71b2f-91bd-491b-90df-c40395c81ee0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.136461] env[62914]: DEBUG oslo_vmware.api [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1190.136461] env[62914]: value = "task-4832830" [ 1190.136461] env[62914]: _type = "Task" [ 1190.136461] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.145822] env[62914]: DEBUG oslo_vmware.api [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832830, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.353554] env[62914]: INFO nova.compute.manager [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Took 24.80 seconds to build instance. [ 1190.375250] env[62914]: DEBUG oslo_vmware.api [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5242a38b-5681-8d98-610d-7d7c511770fc, 'name': SearchDatastore_Task, 'duration_secs': 0.008697} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.375585] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.375817] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.495930] env[62914]: DEBUG nova.compute.manager [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Received event network-changed-bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1190.496173] env[62914]: DEBUG nova.compute.manager [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Refreshing instance network info cache due to event network-changed-bca6528d-bcd2-409f-b91d-8d3ceb00d244. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1190.496446] env[62914]: DEBUG oslo_concurrency.lockutils [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] Acquiring lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.496607] env[62914]: DEBUG oslo_concurrency.lockutils [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] Acquired lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.496781] env[62914]: DEBUG nova.network.neutron [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Refreshing network info cache for port bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1190.574780] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832829, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.650889] env[62914]: DEBUG oslo_vmware.api [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832830, 'name': ReconfigVM_Task, 'duration_secs': 0.220101} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.651380] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942083', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'name': 'volume-548d77fc-3693-4e6f-8097-f2402ca0f874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a', 'attached_at': '', 'detached_at': '', 'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874', 'serial': '548d77fc-3693-4e6f-8097-f2402ca0f874'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1190.855218] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7fccadc6-71bd-40aa-af98-1032166ba047 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.311s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.956010] env[62914]: DEBUG nova.objects.instance [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.059150] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b063ab7-1c54-4f63-bafd-3553b1d4f27f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.072289] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5d9183-bfe9-44ff-bbf8-c3da51c106f7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.079524] env[62914]: DEBUG oslo_vmware.api [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832829, 'name': PowerOnVM_Task, 'duration_secs': 0.599867} completed successfully. 
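The 'Attached VMDK:' and 'Detached VMDK:' entries dump the vmdk connection_info handed between Nova and the volume layer. Below is that dict copied (slightly abridged) from the detach record above for volume 548d77fc-3693-4e6f-8097-f2402ca0f874, plus a hypothetical helper to pull out the fields most often needed when tracing attachments:

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-942083',     # backing reference, value as logged
        'volume_id': '548d77fc-3693-4e6f-8097-f2402ca0f874',
        'name': 'volume-548d77fc-3693-4e6f-8097-f2402ca0f874',
        'profile_id': None, 'qos_specs': None,
        'access_mode': 'rw', 'encrypted': False, 'cacheable': False,
    },
    'status': 'reserved',
    'instance': 'f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a',
    'serial': '548d77fc-3693-4e6f-8097-f2402ca0f874',
}

def describe(ci):
    # One-line summary of a vmdk connection_info payload (helper is ours).
    data = ci['data']
    return (f"{ci['driver_volume_type']} volume {data['volume_id']} "
            f"backed by {data['name']}.vmdk ({data['access_mode']})")

print(describe(connection_info))
# vmdk volume 548d77fc-3693-4e6f-8097-f2402ca0f874 backed by
# volume-548d77fc-3693-4e6f-8097-f2402ca0f874.vmdk (rw)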
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.080135] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1191.110783] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b959ee03-7628-4ced-8beb-377ba9649e24 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.120011] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a82b97e-1126-4874-a482-936b031a1362 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.143443] env[62914]: DEBUG nova.compute.provider_tree [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.205246] env[62914]: DEBUG nova.objects.instance [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'flavor' on Instance uuid f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.217201] env[62914]: DEBUG nova.compute.manager [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1191.218131] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2265eec7-5870-439f-8c12-bb87b0711a65 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.302815] env[62914]: DEBUG nova.network.neutron [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Updated VIF entry in instance network info cache for port bca6528d-bcd2-409f-b91d-8d3ceb00d244. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1191.303247] env[62914]: DEBUG nova.network.neutron [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Updating instance_info_cache with network_info: [{"id": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "address": "fa:16:3e:07:e4:c8", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca6528d-bc", "ovs_interfaceid": "bca6528d-bcd2-409f-b91d-8d3ceb00d244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.460900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.461276] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.461276] env[62914]: DEBUG nova.network.neutron [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1191.461447] env[62914]: DEBUG nova.objects.instance [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'info_cache' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.652024] env[62914]: DEBUG nova.scheduler.client.report [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1191.742976] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a3f69ed8-941a-457c-b5b1-165e3707521b tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 30.387s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.807807] env[62914]: DEBUG oslo_concurrency.lockutils [req-64b8ba67-c295-43b9-adc2-56c8b31b7011 req-d1801827-ca4e-479c-a14c-294aa1dbea3f service nova] Releasing lock "refresh_cache-4b76e4eb-5d56-4eb0-82fc-47661dbc7239" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.964914] env[62914]: DEBUG nova.objects.base [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Object Instance<74e7896c-8a1f-448d-a44b-e6febfff9000> lazy-loaded attributes: flavor,info_cache {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1192.217914] env[62914]: DEBUG oslo_concurrency.lockutils [None req-304a59c9-27d5-47de-be03-8fe2ffdc8053 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.279s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.663303] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.286s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.808230] env[62914]: DEBUG nova.network.neutron [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [{"id": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "address": "fa:16:3e:5e:12:de", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": 
"nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e05c7fc-1e", "ovs_interfaceid": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.227272] env[62914]: INFO nova.scheduler.client.report [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted allocation for migration 7cda217d-33cb-4559-bebb-00862b606af8 [ 1193.311339] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.466842] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.466984] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.467338] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.467532] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.467758] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.469662] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de 
tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.469965] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.470247] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.470603] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.470775] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.473027] env[62914]: INFO nova.compute.manager [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Terminating instance [ 1193.474587] env[62914]: INFO nova.compute.manager [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Terminating instance [ 1193.476152] env[62914]: DEBUG nova.compute.manager [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1193.476381] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1193.477387] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7cdf48-edba-4e66-ac6e-668ebf07df08 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.480601] env[62914]: DEBUG nova.compute.manager [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1193.480786] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1193.481557] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565c4de2-af59-4a0a-b182-f762d0d0242f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.489934] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1193.492123] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-129d1141-9b9f-480e-a31c-58d5b7013845 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.493727] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1193.493966] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6716fc48-3b3a-45e8-9d3c-16ed9fdafa2e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.500570] env[62914]: DEBUG oslo_vmware.api [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1193.500570] env[62914]: value = "task-4832833" [ 1193.500570] env[62914]: _type = "Task" [ 1193.500570] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.501878] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1193.501878] env[62914]: value = "task-4832832" [ 1193.501878] env[62914]: _type = "Task" [ 1193.501878] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.515721] env[62914]: DEBUG oslo_vmware.api [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.519475] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832832, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.733792] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0bf51a96-98a5-4ec8-bce4-e546935312ef tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.720s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.815561] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1193.815970] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40aca1a4-3732-49cb-9f0f-9b7355ae447a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.824263] env[62914]: DEBUG oslo_vmware.api [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1193.824263] env[62914]: value = "task-4832834" [ 1193.824263] env[62914]: _type = "Task" [ 1193.824263] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.837892] env[62914]: DEBUG oslo_vmware.api [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832834, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.012921] env[62914]: DEBUG oslo_vmware.api [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832833, 'name': PowerOffVM_Task, 'duration_secs': 0.328327} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.013579] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1194.013856] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1194.014123] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52c390e2-3ff6-46ad-a0ad-e09b1a749d49 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.019585] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832832, 'name': PowerOffVM_Task, 'duration_secs': 0.313934} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.020355] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1194.020458] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1194.020734] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52999b81-e2d4-42c3-85ef-6c5373b95f34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.117031] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1194.117407] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1194.117536] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleting the datastore file [datastore2] 88acf376-122d-4796-8400-dfc4c7ec45d7 
{{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.117897] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6106fd53-3abf-4ed2-bd6c-951317d217e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.121365] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1194.121649] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1194.122063] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleting the datastore file [datastore1] f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.122699] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-939fdf7c-2fb1-4d47-af7a-609be2648250 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.128350] env[62914]: DEBUG oslo_vmware.api [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for the task: (returnval){ [ 1194.128350] env[62914]: value = "task-4832837" [ 1194.128350] env[62914]: _type = "Task" [ 1194.128350] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.136732] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1194.136732] env[62914]: value = "task-4832838" [ 1194.136732] env[62914]: _type = "Task" [ 1194.136732] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.148564] env[62914]: DEBUG oslo_vmware.api [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.158202] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.336345] env[62914]: DEBUG oslo_vmware.api [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832834, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.456163] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.456587] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.456875] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.457165] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.457436] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.459942] env[62914]: INFO nova.compute.manager [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Terminating instance [ 1194.462466] env[62914]: DEBUG nova.compute.manager [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1194.462745] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1194.463653] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97333545-a8d8-452f-99fa-d5339656c626 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.471988] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1194.472280] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b58298b-0c83-4001-9cbf-b5cab9323ac4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.479290] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1194.479290] env[62914]: value = "task-4832840" [ 1194.479290] env[62914]: _type = "Task" [ 1194.479290] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.488828] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832840, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.640938] env[62914]: DEBUG oslo_vmware.api [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Task: {'id': task-4832837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276956} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.644199] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.644449] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1194.644719] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1194.644831] env[62914]: INFO nova.compute.manager [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1194.645079] env[62914]: DEBUG oslo.service.loopingcall [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.645345] env[62914]: DEBUG nova.compute.manager [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1194.645452] env[62914]: DEBUG nova.network.neutron [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1194.652525] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.836553] env[62914]: DEBUG oslo_vmware.api [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832834, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.991247] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832840, 'name': PowerOffVM_Task, 'duration_secs': 0.425492} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.991580] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1194.991814] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1194.992069] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-391ba00b-9fb4-448d-9bec-913f34d46159 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.084861] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1195.085585] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1195.085585] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleting the datastore file [datastore2] af141439-1c36-4184-9775-d1e30ee77ddf {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.085800] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31d37b6f-8c69-4b60-bba7-1ba7f1aa60a9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.092699] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1195.092699] env[62914]: value = "task-4832842" [ 1195.092699] env[62914]: _type = "Task" [ 1195.092699] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.101518] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832842, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.151624] env[62914]: DEBUG oslo_vmware.api [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.748861} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.151624] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.151624] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1195.151624] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1195.151624] env[62914]: INFO nova.compute.manager [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1195.151805] env[62914]: DEBUG oslo.service.loopingcall [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1195.152041] env[62914]: DEBUG nova.compute.manager [-] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1195.152149] env[62914]: DEBUG nova.network.neutron [-] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1195.338144] env[62914]: DEBUG oslo_vmware.api [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832834, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.379628] env[62914]: DEBUG nova.compute.manager [req-71ffb6bb-7a65-4df9-a7fc-1dfaed9c5c38 req-07b7479f-b15b-401d-baaf-ec94ef50bc10 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Received event network-vif-deleted-949a1716-cbb0-44a7-a0f6-4d27a45071e0 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1195.379628] env[62914]: INFO nova.compute.manager [req-71ffb6bb-7a65-4df9-a7fc-1dfaed9c5c38 req-07b7479f-b15b-401d-baaf-ec94ef50bc10 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Neutron deleted interface 949a1716-cbb0-44a7-a0f6-4d27a45071e0; detaching it from the instance and deleting it from the info cache [ 1195.379628] env[62914]: DEBUG nova.network.neutron [req-71ffb6bb-7a65-4df9-a7fc-1dfaed9c5c38 req-07b7479f-b15b-401d-baaf-ec94ef50bc10 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.610672] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.788913] env[62914]: DEBUG nova.network.neutron [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.818207] env[62914]: DEBUG nova.compute.manager [req-e80fe605-ccef-45ae-bbda-0b233d16f435 req-2ee79dcb-61a2-496c-8a1a-73cfb2a78ccb service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Received event network-vif-deleted-d3377942-1cea-43ef-8a80-ebe5519d491c {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1195.818207] env[62914]: INFO nova.compute.manager [req-e80fe605-ccef-45ae-bbda-0b233d16f435 req-2ee79dcb-61a2-496c-8a1a-73cfb2a78ccb service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Neutron deleted interface d3377942-1cea-43ef-8a80-ebe5519d491c; detaching it from the instance and deleting it from the info cache [ 1195.818817] env[62914]: DEBUG nova.network.neutron [req-e80fe605-ccef-45ae-bbda-0b233d16f435 req-2ee79dcb-61a2-496c-8a1a-73cfb2a78ccb service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.842058] env[62914]: DEBUG oslo_vmware.api [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832834, 'name': PowerOnVM_Task, 'duration_secs': 1.646401} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.842058] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1195.842460] env[62914]: DEBUG nova.compute.manager [None req-f5361afd-be56-481c-8cfd-b8ad8282bee8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1195.843212] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11acc2c9-8e12-454e-876b-8690d935f356 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.882487] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61079692-db71-4592-908c-513217b1ddf7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.893683] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9581adf4-2540-4251-b73b-20341c123bca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.930273] env[62914]: DEBUG nova.compute.manager [req-71ffb6bb-7a65-4df9-a7fc-1dfaed9c5c38 req-07b7479f-b15b-401d-baaf-ec94ef50bc10 service nova] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Detach interface failed, port_id=949a1716-cbb0-44a7-a0f6-4d27a45071e0, reason: Instance 88acf376-122d-4796-8400-dfc4c7ec45d7 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1196.109246] env[62914]: DEBUG oslo_vmware.api [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.519977} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.109246] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.109246] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1196.109246] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1196.109246] env[62914]: INFO nova.compute.manager [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1196.109246] env[62914]: DEBUG oslo.service.loopingcall [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.109246] env[62914]: DEBUG nova.compute.manager [-] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1196.109246] env[62914]: DEBUG nova.network.neutron [-] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1196.296934] env[62914]: INFO nova.compute.manager [-] [instance: 88acf376-122d-4796-8400-dfc4c7ec45d7] Took 1.65 seconds to deallocate network for instance. 
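The entries above trace an instance teardown through a chain of asynchronous vCenter tasks (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), each of which the service polls until the "progress is 100%" / "completed successfully" lines appear. Below is a minimal sketch of that polling pattern, written for illustration only: get_task_info is a hypothetical callable standing in for a TaskInfo lookup, and this is an approximation of the loop behind wait_for_task/_poll_task, not oslo.vmware's actual implementation.

    # Illustrative sketch of the task-polling pattern visible in the log above.
    # Assumptions: get_task_info() returns an object with .state in
    # ('queued', 'running', 'success', 'error'), plus .progress and .error,
    # similar to the vSphere TaskInfo the DEBUG lines are reporting on.
    import time


    class TaskFailed(Exception):
        pass


    def wait_for_vcenter_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter task until it reaches a terminal state."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                # e.g. PowerOffVM_Task or DeleteDatastoreFile_Task completed
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            # corresponds to the repeated "progress is N%" DEBUG entries
            time.sleep(poll_interval)
        raise TaskFailed('timed out waiting for vCenter task')
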
[ 1196.297365] env[62914]: DEBUG nova.network.neutron [-] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.322772] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d7ff121-9a27-4b1a-9603-6351da624519 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.335751] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f247b13-486c-418e-9549-877ff9b09a39 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.375280] env[62914]: DEBUG nova.compute.manager [req-e80fe605-ccef-45ae-bbda-0b233d16f435 req-2ee79dcb-61a2-496c-8a1a-73cfb2a78ccb service nova] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Detach interface failed, port_id=d3377942-1cea-43ef-8a80-ebe5519d491c, reason: Instance f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1196.802171] env[62914]: INFO nova.compute.manager [-] [instance: f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a] Took 1.65 seconds to deallocate network for instance. [ 1196.810488] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.811182] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.811600] env[62914]: DEBUG nova.objects.instance [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lazy-loading 'resources' on Instance uuid 88acf376-122d-4796-8400-dfc4c7ec45d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.205060] env[62914]: DEBUG nova.network.neutron [-] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.314932] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.504053] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a977fa9-ab80-4e62-99ee-6dd5edb1fb9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.514802] 
env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349db392-34c9-43d1-a548-9850783964d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.546907] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661b5c81-1ddd-43d6-a549-70503686686a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.554860] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f0fb9f-7f79-4bc5-b629-ef1013008e70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.570618] env[62914]: DEBUG nova.compute.provider_tree [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.706607] env[62914]: INFO nova.compute.manager [-] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Took 1.60 seconds to deallocate network for instance. [ 1197.773684] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.773957] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.774222] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "e730b472-fca8-4041-a00c-91bee25232f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.774424] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.774607] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.776991] env[62914]: INFO nova.compute.manager [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Terminating instance [ 1197.779068] env[62914]: DEBUG nova.compute.manager [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1197.779276] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1197.780368] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdc27fe-ba1d-46e5-a521-848e37f80245 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.789277] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1197.789559] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f5a0149-e6f1-4184-a4b2-86efffdfb98e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.797740] env[62914]: DEBUG oslo_vmware.api [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1197.797740] env[62914]: value = "task-4832844" [ 1197.797740] env[62914]: _type = "Task" [ 1197.797740] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.808764] env[62914]: DEBUG oslo_vmware.api [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832844, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.857675] env[62914]: DEBUG nova.compute.manager [req-8a8bed43-019c-4961-a23b-b8ba24f20183 req-4f8d53c5-b702-483d-a7ee-2d705e2084a9 service nova] [instance: af141439-1c36-4184-9775-d1e30ee77ddf] Received event network-vif-deleted-5d5caccf-1912-40af-a849-900df4764c6f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1198.073552] env[62914]: DEBUG nova.scheduler.client.report [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1198.214727] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.308778] env[62914]: DEBUG oslo_vmware.api [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832844, 'name': PowerOffVM_Task, 'duration_secs': 0.310485} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.309115] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1198.309300] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1198.309570] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd42a382-5c6f-4622-ac83-78b0fde9760b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.379755] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1198.379935] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1198.380136] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleting the datastore file [datastore1] e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.380428] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba69389f-4f61-4aee-b694-d1dac6dadadd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.389215] env[62914]: DEBUG oslo_vmware.api [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for the task: (returnval){ [ 1198.389215] env[62914]: value = "task-4832846" [ 1198.389215] env[62914]: _type = "Task" [ 1198.389215] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.397959] env[62914]: DEBUG oslo_vmware.api [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832846, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.582266] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.771s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.585912] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.270s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.589767] env[62914]: DEBUG nova.objects.instance [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'resources' on Instance uuid f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.618955] env[62914]: INFO nova.scheduler.client.report [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Deleted allocations for instance 88acf376-122d-4796-8400-dfc4c7ec45d7 [ 1198.903989] env[62914]: DEBUG oslo_vmware.api [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Task: {'id': task-4832846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31996} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.904476] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.904727] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1198.905126] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1198.905385] env[62914]: INFO nova.compute.manager [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Took 1.13 seconds to destroy the instance on the hypervisor. 
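The scheduler report entries above keep repeating the same inventory payload for provider f2f7a014-852b-4b37-9610-c5761f4b0175. As a reading aid for those dicts, the sketch below (illustrative only; `effective_capacity` is a made-up helper, not a Nova or Placement function) shows how usable capacity falls out of the reported fields: (total - reserved) * allocation_ratio, with max_unit capping what a single allocation may request.

```python
# Illustrative only: reproduces the inventory dict logged above and derives the
# capacity figures a Placement-style scheduler would work with.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1, 'max_unit': 95,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Capacity available for allocations: (total - reserved) * allocation_ratio."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in INVENTORY.items():
    print(f"{rc}: capacity={effective_capacity(inv):.0f}, "
          f"max per allocation={inv['max_unit']}")
# VCPU: capacity=192, max per allocation=16
# MEMORY_MB: capacity=196078, max per allocation=65530
# DISK_GB: capacity=200, max per allocation=95
```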
[ 1198.905681] env[62914]: DEBUG oslo.service.loopingcall [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1198.905920] env[62914]: DEBUG nova.compute.manager [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1198.906030] env[62914]: DEBUG nova.network.neutron [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1199.129135] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1289325-f66f-46da-93e3-d239ece866de tempest-ServerActionsTestOtherB-1631848678 tempest-ServerActionsTestOtherB-1631848678-project-member] Lock "88acf376-122d-4796-8400-dfc4c7ec45d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.658s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.281990] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7ee3c3-d836-4391-82ec-29968dc4862d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.294649] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9409c37-7a79-4976-b021-faf3156f4830 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.344110] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebe0259-3b56-4c4b-a539-2b0208db108a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.352800] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed3130c-c5ca-41bf-9ccc-965747b8ea78 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.367615] env[62914]: DEBUG nova.compute.provider_tree [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.871756] env[62914]: DEBUG nova.scheduler.client.report [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1199.925303] env[62914]: DEBUG nova.compute.manager [req-f0f0965e-838d-4391-a625-b72ee4d07199 req-d4547a71-7879-4f17-a08d-59e9cc4c8e99 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Received event network-vif-deleted-8d6d259f-1ebc-4e49-b6f8-114f414606f7 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1199.926023] env[62914]: INFO nova.compute.manager [req-f0f0965e-838d-4391-a625-b72ee4d07199 req-d4547a71-7879-4f17-a08d-59e9cc4c8e99 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Neutron deleted interface 8d6d259f-1ebc-4e49-b6f8-114f414606f7; detaching it from the instance and deleting it from the info cache [ 1199.926023] env[62914]: DEBUG nova.network.neutron [req-f0f0965e-838d-4391-a625-b72ee4d07199 req-d4547a71-7879-4f17-a08d-59e9cc4c8e99 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.950075] env[62914]: DEBUG nova.network.neutron [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.378994] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.381536] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.167s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.381785] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.410837] env[62914]: INFO nova.scheduler.client.report [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleted allocations for instance f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a [ 1200.421155] env[62914]: INFO nova.scheduler.client.report [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted allocations for instance af141439-1c36-4184-9775-d1e30ee77ddf [ 1200.430320] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bb1903d-17f2-4974-b8f3-71479bb7dd7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.452362] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8f4131-eddd-4312-adbe-cc4fc6613fcc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.470861] env[62914]: INFO nova.compute.manager [-] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Took 1.56 seconds to deallocate network for instance. [ 1200.507031] env[62914]: DEBUG nova.compute.manager [req-f0f0965e-838d-4391-a625-b72ee4d07199 req-d4547a71-7879-4f17-a08d-59e9cc4c8e99 service nova] [instance: e730b472-fca8-4041-a00c-91bee25232f7] Detach interface failed, port_id=8d6d259f-1ebc-4e49-b6f8-114f414606f7, reason: Instance e730b472-fca8-4041-a00c-91bee25232f7 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1200.922201] env[62914]: DEBUG oslo_concurrency.lockutils [None req-528c8d7d-45a7-4afe-b93f-e70d03a889af tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "f05f8fb4-0994-4f57-9d2b-3d491d3e9a0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.455s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.932966] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f53f430e-973a-4904-be7c-8f073bf67429 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "af141439-1c36-4184-9775-d1e30ee77ddf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.476s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.981387] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.981838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.982157] env[62914]: DEBUG nova.objects.instance [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lazy-loading 'resources' on Instance uuid e730b472-fca8-4041-a00c-91bee25232f7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.041330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.041607] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.546856] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1201.667032] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf48f1-8d9e-4582-9785-09ca4874af5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.677891] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7981d6ab-8e1e-4254-9685-de5e2be200d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.726032] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3372dc-eff4-4a38-beb8-eb6e0a2180c8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.734718] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628f898c-5a16-414c-85cc-8638ddbecb2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.751833] env[62914]: DEBUG nova.compute.provider_tree [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.068195] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.216589] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.216942] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.254599] env[62914]: DEBUG nova.scheduler.client.report [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1202.727028] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1202.764023] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.768316] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.699s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.773472] env[62914]: INFO nova.compute.claims [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1202.827819] env[62914]: INFO nova.scheduler.client.report [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Deleted allocations for instance e730b472-fca8-4041-a00c-91bee25232f7 [ 1203.257947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.337136] env[62914]: DEBUG oslo_concurrency.lockutils [None req-7296af3b-cd97-4773-86ce-834e6918e787 tempest-AttachVolumeShelveTestJSON-1170767985 tempest-AttachVolumeShelveTestJSON-1170767985-project-member] Lock "e730b472-fca8-4041-a00c-91bee25232f7" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.563s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.957584] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad92171b-640f-4674-8aa0-0929470ad91c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.966426] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040141db-5f6a-4235-a69c-94d3414c3784 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.001718] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb4acda-dea7-4c8a-95a0-9efd8a948f2e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.010130] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64959c6-daa7-46f3-b3d5-fbfa36facf25 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.027267] env[62914]: DEBUG nova.compute.provider_tree [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.530292] env[62914]: DEBUG nova.scheduler.client.report [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1205.035702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.036211] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1205.038863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.781s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.040347] env[62914]: INFO nova.compute.claims [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1205.425031] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.425382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.425382] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.425645] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.425894] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.428962] env[62914]: INFO nova.compute.manager [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Terminating instance [ 1205.431389] env[62914]: DEBUG nova.compute.manager [None 
req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1205.431589] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1205.432433] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a23856-3059-4232-9fbc-761592ae8937 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.440486] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1205.440679] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2935f02-6e96-4f13-92a2-bc32e1631246 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.447178] env[62914]: DEBUG oslo_vmware.api [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1205.447178] env[62914]: value = "task-4832853" [ 1205.447178] env[62914]: _type = "Task" [ 1205.447178] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.456846] env[62914]: DEBUG oslo_vmware.api [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.545446] env[62914]: DEBUG nova.compute.utils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1205.550056] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1205.550056] env[62914]: DEBUG nova.network.neutron [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1205.596090] env[62914]: DEBUG nova.policy [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da6461b0a03841c2b5f7fee9cadaee88', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b216f8b19dc43648f7b53db8f433685', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1205.914261] env[62914]: DEBUG nova.network.neutron [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Successfully created port: 94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1205.956913] env[62914]: DEBUG oslo_vmware.api [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832853, 'name': PowerOffVM_Task, 'duration_secs': 0.188219} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.957220] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1205.957465] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1205.957738] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d203d542-5413-49d7-bc14-4c71789b7629 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.032447] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1206.032692] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1206.032884] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleting the datastore file [datastore2] 0c47848d-fcff-404d-8e84-e9fd09be9e9e {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1206.033499] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb946f2f-83cb-4a5d-9a88-195a428350ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.039860] env[62914]: DEBUG oslo_vmware.api [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1206.039860] env[62914]: value = "task-4832855" [ 1206.039860] env[62914]: _type = "Task" [ 1206.039860] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.048693] env[62914]: DEBUG oslo_vmware.api [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832855, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.054190] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1206.183393] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763ef4db-d8fa-455a-b106-8190959ec79e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.191473] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46eb1b38-7162-402f-91cf-4642661c0c4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.223646] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3e1d0c-6396-4ca0-9626-6c38fb2bcf23 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.231404] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9172d64e-5aab-4b1c-a046-70da87a89870 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.245877] env[62914]: DEBUG nova.compute.provider_tree [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.550510] env[62914]: DEBUG oslo_vmware.api [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176906} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.550910] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1206.551133] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1206.551336] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1206.551520] env[62914]: INFO nova.compute.manager [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1206.551768] env[62914]: DEBUG oslo.service.loopingcall [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1206.551967] env[62914]: DEBUG nova.compute.manager [-] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1206.552077] env[62914]: DEBUG nova.network.neutron [-] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1206.749390] env[62914]: DEBUG nova.scheduler.client.report [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1207.067301] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1207.099629] env[62914]: DEBUG nova.compute.manager [req-1fbd5a0a-aa15-4cc5-a9f4-099f7e73b350 req-cd6fbe06-2cbf-4179-b0d7-edf72c967530 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Received event network-vif-deleted-bd8a334c-ccd2-4d47-8194-494527e06ae9 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1207.099881] env[62914]: INFO nova.compute.manager [req-1fbd5a0a-aa15-4cc5-a9f4-099f7e73b350 req-cd6fbe06-2cbf-4179-b0d7-edf72c967530 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Neutron deleted interface bd8a334c-ccd2-4d47-8194-494527e06ae9; detaching it from the instance and deleting it from the info cache [ 1207.100030] env[62914]: DEBUG nova.network.neutron [req-1fbd5a0a-aa15-4cc5-a9f4-099f7e73b350 req-cd6fbe06-2cbf-4179-b0d7-edf72c967530 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.104238] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1207.104521] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1207.104642] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.104826] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1207.104976] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1207.105150] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1207.105381] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1207.105552] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1207.105725] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1207.105895] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1207.106104] env[62914]: DEBUG nova.virt.hardware [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1207.107314] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d650b539-5bc1-4452-ba36-67cb8b378116 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.116537] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45a1873-f2ef-42d6-8019-fbda30e86d16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.256020] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.256645] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Start building networks asynchronously for 
instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1207.448957] env[62914]: DEBUG nova.compute.manager [req-a00d8d0b-6edd-440e-9589-7eca7f1d90ee req-a975b000-1fa9-49ac-9d83-a565861edaea service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Received event network-vif-plugged-94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1207.448957] env[62914]: DEBUG oslo_concurrency.lockutils [req-a00d8d0b-6edd-440e-9589-7eca7f1d90ee req-a975b000-1fa9-49ac-9d83-a565861edaea service nova] Acquiring lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.448957] env[62914]: DEBUG oslo_concurrency.lockutils [req-a00d8d0b-6edd-440e-9589-7eca7f1d90ee req-a975b000-1fa9-49ac-9d83-a565861edaea service nova] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.448957] env[62914]: DEBUG oslo_concurrency.lockutils [req-a00d8d0b-6edd-440e-9589-7eca7f1d90ee req-a975b000-1fa9-49ac-9d83-a565861edaea service nova] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.448957] env[62914]: DEBUG nova.compute.manager [req-a00d8d0b-6edd-440e-9589-7eca7f1d90ee req-a975b000-1fa9-49ac-9d83-a565861edaea service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] No waiting events found dispatching network-vif-plugged-94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1207.448957] env[62914]: WARNING nova.compute.manager [req-a00d8d0b-6edd-440e-9589-7eca7f1d90ee req-a975b000-1fa9-49ac-9d83-a565861edaea service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Received unexpected event network-vif-plugged-94196113-7172-43c5-82b1-7375a40b8516 for instance with vm_state building and task_state spawning. 
[ 1207.535040] env[62914]: DEBUG nova.network.neutron [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Successfully updated port: 94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1207.574699] env[62914]: DEBUG nova.network.neutron [-] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.603261] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e050670-0cef-47fe-aa6c-9ae349dc4126 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.612996] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868cfda0-5298-4435-b2c4-3f9f5bc87261 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.643346] env[62914]: DEBUG nova.compute.manager [req-1fbd5a0a-aa15-4cc5-a9f4-099f7e73b350 req-cd6fbe06-2cbf-4179-b0d7-edf72c967530 service nova] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Detach interface failed, port_id=bd8a334c-ccd2-4d47-8194-494527e06ae9, reason: Instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1207.761736] env[62914]: DEBUG nova.compute.utils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1207.763328] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Allocating IP information in the background. 
{{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1207.763953] env[62914]: DEBUG nova.network.neutron [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1207.802302] env[62914]: DEBUG nova.policy [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4f1342629ac4aee802a2b69a5459827', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ae1b7abf6f24eccb2b44d82687deb76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1208.038270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "refresh_cache-e0019e52-b793-4db7-a2a3-4553c78ad6b9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.038441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquired lock "refresh_cache-e0019e52-b793-4db7-a2a3-4553c78ad6b9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.038587] env[62914]: DEBUG nova.network.neutron [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1208.077146] env[62914]: INFO nova.compute.manager [-] [instance: 0c47848d-fcff-404d-8e84-e9fd09be9e9e] Took 1.52 seconds to deallocate network for instance. [ 1208.266794] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1208.576315] env[62914]: DEBUG nova.network.neutron [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Successfully created port: 8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1208.582016] env[62914]: DEBUG nova.network.neutron [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1208.584499] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.584743] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.584965] env[62914]: DEBUG nova.objects.instance [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'resources' on Instance uuid 0c47848d-fcff-404d-8e84-e9fd09be9e9e {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.726201] env[62914]: DEBUG nova.network.neutron [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Updating instance_info_cache with network_info: [{"id": "94196113-7172-43c5-82b1-7375a40b8516", "address": "fa:16:3e:42:58:04", "network": {"id": "36d6d096-a2b3-4fc7-b9b7-faf1e085c851", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1534795861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b216f8b19dc43648f7b53db8f433685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94196113-71", "ovs_interfaceid": "94196113-7172-43c5-82b1-7375a40b8516", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.229330] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Releasing lock "refresh_cache-e0019e52-b793-4db7-a2a3-4553c78ad6b9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.229657] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Instance network_info: |[{"id": "94196113-7172-43c5-82b1-7375a40b8516", "address": "fa:16:3e:42:58:04", "network": {"id": "36d6d096-a2b3-4fc7-b9b7-faf1e085c851", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1534795861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b216f8b19dc43648f7b53db8f433685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94196113-71", "ovs_interfaceid": "94196113-7172-43c5-82b1-7375a40b8516", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1209.230192] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:58:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94196113-7172-43c5-82b1-7375a40b8516', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.242697] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Creating folder: Project (8b216f8b19dc43648f7b53db8f433685). Parent ref: group-v941773. 
{{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1209.244021] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0709ef4-1162-4b8d-a99a-7077179a4104 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.246356] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f8da77-0eea-40b1-94ae-eeb1a0717f71 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.256286] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f089a7b-1716-434b-92a9-08e8624a30fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.261545] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Created folder: Project (8b216f8b19dc43648f7b53db8f433685) in parent group-v941773. [ 1209.261761] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Creating folder: Instances. Parent ref: group-v942101. {{(pid=62914) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1209.262028] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d58b61d-59b8-449b-a360-a963ed3da3fa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.290894] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1209.295400] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3676210b-c31e-4485-bd8e-7ead0ffcb0b1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.298479] env[62914]: INFO nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Created folder: Instances in parent group-v942101. [ 1209.298747] env[62914]: DEBUG oslo.service.loopingcall [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1209.298960] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1209.299668] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e6acc96-6496-4a1b-b33c-77570a8308e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.321277] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1ccf96-51c2-4665-9728-a623836f9315 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.327209] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.327209] env[62914]: value = "task-4832860" [ 1209.327209] env[62914]: _type = "Task" [ 1209.327209] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.329629] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1209.329874] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1209.330118] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1209.330259] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1209.330440] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1209.330599] env[62914]: DEBUG nova.virt.hardware [None 
req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1209.330813] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1209.330980] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1209.331189] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1209.331368] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1209.331554] env[62914]: DEBUG nova.virt.hardware [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1209.332950] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827d0d98-15d5-415f-8646-f5a4fc2eb5a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.350331] env[62914]: DEBUG nova.compute.provider_tree [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.361139] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc74172a-2710-4b6d-835e-eee0a16eddd9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.365541] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832860, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.482166] env[62914]: DEBUG nova.compute.manager [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Received event network-changed-94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1209.482166] env[62914]: DEBUG nova.compute.manager [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Refreshing instance network info cache due to event network-changed-94196113-7172-43c5-82b1-7375a40b8516. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1209.482166] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] Acquiring lock "refresh_cache-e0019e52-b793-4db7-a2a3-4553c78ad6b9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.482166] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] Acquired lock "refresh_cache-e0019e52-b793-4db7-a2a3-4553c78ad6b9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.483059] env[62914]: DEBUG nova.network.neutron [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Refreshing network info cache for port 94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1209.844356] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832860, 'name': CreateVM_Task, 'duration_secs': 0.33728} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.844984] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1209.845337] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.845510] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.845840] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1209.846130] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aa0be40-bb3d-4e67-a578-2a14c7e23c41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.851986] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1209.851986] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bb293c-9105-10fc-0c1e-f666d78602cd" [ 1209.851986] env[62914]: _type = "Task" [ 1209.851986] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.855937] env[62914]: DEBUG nova.scheduler.client.report [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1209.866726] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bb293c-9105-10fc-0c1e-f666d78602cd, 'name': SearchDatastore_Task, 'duration_secs': 0.011453} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.866891] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.867158] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1209.867793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.867793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.867793] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1209.868412] 
env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6e149ff-6669-4358-8889-8406838796f3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.877933] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1209.877933] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1209.879484] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e12c9efc-7fb4-488a-8782-3977b5222e01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.886821] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1209.886821] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ea4815-466f-90c0-d945-0ad15aff86b3" [ 1209.886821] env[62914]: _type = "Task" [ 1209.886821] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.896367] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ea4815-466f-90c0-d945-0ad15aff86b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.268543] env[62914]: DEBUG nova.network.neutron [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Updated VIF entry in instance network info cache for port 94196113-7172-43c5-82b1-7375a40b8516. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1210.269053] env[62914]: DEBUG nova.network.neutron [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Updating instance_info_cache with network_info: [{"id": "94196113-7172-43c5-82b1-7375a40b8516", "address": "fa:16:3e:42:58:04", "network": {"id": "36d6d096-a2b3-4fc7-b9b7-faf1e085c851", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1534795861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b216f8b19dc43648f7b53db8f433685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94196113-71", "ovs_interfaceid": "94196113-7172-43c5-82b1-7375a40b8516", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.362588] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.390752] env[62914]: DEBUG nova.network.neutron [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Successfully updated port: 8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1210.404485] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52ea4815-466f-90c0-d945-0ad15aff86b3, 'name': SearchDatastore_Task, 'duration_secs': 0.010681} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.405576] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e076d4a9-f414-45c2-9d04-59b84ccb4468 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.412299] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1210.412299] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527a4896-5283-9220-0aa5-1b0d71611261" [ 1210.412299] env[62914]: _type = "Task" [ 1210.412299] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.421708] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527a4896-5283-9220-0aa5-1b0d71611261, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.549686] env[62914]: INFO nova.scheduler.client.report [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleted allocations for instance 0c47848d-fcff-404d-8e84-e9fd09be9e9e [ 1210.775420] env[62914]: DEBUG oslo_concurrency.lockutils [req-9e1be589-9a56-4b55-aefa-fbb717cef32f req-732f48e1-7333-4e77-ae01-8a1658c1f552 service nova] Releasing lock "refresh_cache-e0019e52-b793-4db7-a2a3-4553c78ad6b9" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.893712] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.893870] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.894069] env[62914]: DEBUG nova.network.neutron [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1210.924021] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527a4896-5283-9220-0aa5-1b0d71611261, 'name': SearchDatastore_Task, 'duration_secs': 0.055479} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.924425] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.924724] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e0019e52-b793-4db7-a2a3-4553c78ad6b9/e0019e52-b793-4db7-a2a3-4553c78ad6b9.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1210.925077] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59abe64a-1b9a-4d80-84ef-5a95999ecc75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.932348] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1210.932348] env[62914]: value = "task-4832861" [ 1210.932348] env[62914]: _type = "Task" [ 1210.932348] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.941452] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832861, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.058462] env[62914]: DEBUG oslo_concurrency.lockutils [None req-47a7d4c7-2f6e-4009-9b5b-0a10a5a094a7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "0c47848d-fcff-404d-8e84-e9fd09be9e9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.633s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.429561] env[62914]: DEBUG nova.network.neutron [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1211.444949] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832861, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.520809] env[62914]: DEBUG nova.compute.manager [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Received event network-vif-plugged-8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1211.521102] env[62914]: DEBUG oslo_concurrency.lockutils [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] Acquiring lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.521364] env[62914]: DEBUG oslo_concurrency.lockutils [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.521527] env[62914]: DEBUG oslo_concurrency.lockutils [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.521706] env[62914]: DEBUG nova.compute.manager [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] No waiting events found dispatching network-vif-plugged-8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1211.521873] env[62914]: WARNING nova.compute.manager [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Received unexpected event network-vif-plugged-8022bac9-6ae9-47da-a35b-34baa22c828e for instance with vm_state building and task_state spawning. [ 1211.522056] env[62914]: DEBUG nova.compute.manager [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Received event network-changed-8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1211.522273] env[62914]: DEBUG nova.compute.manager [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Refreshing instance network info cache due to event network-changed-8022bac9-6ae9-47da-a35b-34baa22c828e. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1211.522406] env[62914]: DEBUG oslo_concurrency.lockutils [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] Acquiring lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.593186] env[62914]: DEBUG nova.network.neutron [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.943833] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832861, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542742} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.944170] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] e0019e52-b793-4db7-a2a3-4553c78ad6b9/e0019e52-b793-4db7-a2a3-4553c78ad6b9.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1211.944453] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1211.944726] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0cdfcd5-348f-449d-b54b-3d3ab6c9edc1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.952887] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1211.952887] env[62914]: value = "task-4832862" [ 1211.952887] env[62914]: _type = "Task" [ 1211.952887] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.962421] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832862, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.095939] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1212.096311] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Instance network_info: |[{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1212.096790] env[62914]: DEBUG oslo_concurrency.lockutils [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.097080] env[62914]: DEBUG nova.network.neutron [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Refreshing network info cache for port 8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1212.098355] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:4f:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c9a12d2-469f-4199-bfaa-f791d765deac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8022bac9-6ae9-47da-a35b-34baa22c828e', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1212.105973] env[62914]: DEBUG oslo.service.loopingcall [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1212.106980] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1212.107243] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bb4033b-3162-414a-b1ec-34b1d4d4eb2f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.128181] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1212.128181] env[62914]: value = "task-4832863" [ 1212.128181] env[62914]: _type = "Task" [ 1212.128181] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.138036] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832863, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.157115] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.157515] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.230374] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.230657] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.230885] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.231081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.231285] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.234048] env[62914]: INFO nova.compute.manager [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Terminating instance [ 1212.236213] env[62914]: DEBUG nova.compute.manager [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1212.236419] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1212.237623] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecf8b6e-7afa-40de-905b-89c992fefa5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.245987] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1212.246301] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbd31d50-9783-4256-b07d-1bdb58e32129 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.253698] env[62914]: DEBUG oslo_vmware.api [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1212.253698] env[62914]: value = "task-4832864" [ 1212.253698] env[62914]: _type = "Task" [ 1212.253698] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.263298] env[62914]: DEBUG oslo_vmware.api [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.463280] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071576} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.463692] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1212.464472] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fc33a9-af2d-460d-9319-0d54ac4a6ced {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.487620] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] e0019e52-b793-4db7-a2a3-4553c78ad6b9/e0019e52-b793-4db7-a2a3-4553c78ad6b9.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1212.487953] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4c56bf2-ada0-4747-a5fe-a15cfd1e4e9c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.509970] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1212.509970] env[62914]: value = "task-4832865" [ 1212.509970] env[62914]: _type = "Task" [ 1212.509970] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.519200] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832865, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.638323] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832863, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.659891] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1212.763076] env[62914]: DEBUG oslo_vmware.api [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832864, 'name': PowerOffVM_Task, 'duration_secs': 0.236748} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.765793] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1212.766014] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1212.766326] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce107933-b785-4630-a257-0d1b247b4dc4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.835410] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1212.835717] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1212.835943] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleting the datastore file [datastore2] bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1212.836291] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10b8d981-593b-41b1-94f8-a1830f69a834 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.844945] env[62914]: DEBUG oslo_vmware.api [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for the task: (returnval){ [ 1212.844945] 
env[62914]: value = "task-4832867" [ 1212.844945] env[62914]: _type = "Task" [ 1212.844945] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.854284] env[62914]: DEBUG oslo_vmware.api [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832867, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.876502] env[62914]: DEBUG nova.network.neutron [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updated VIF entry in instance network info cache for port 8022bac9-6ae9-47da-a35b-34baa22c828e. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1212.876945] env[62914]: DEBUG nova.network.neutron [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.024371] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832865, 'name': ReconfigVM_Task, 'duration_secs': 0.473556} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.024860] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Reconfigured VM instance instance-00000078 to attach disk [datastore2] e0019e52-b793-4db7-a2a3-4553c78ad6b9/e0019e52-b793-4db7-a2a3-4553c78ad6b9.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1213.025827] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45ddd6ec-bc80-49bb-a2cc-477c4cd5ce92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.034191] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1213.034191] env[62914]: value = "task-4832868" [ 1213.034191] env[62914]: _type = "Task" [ 1213.034191] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.049942] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832868, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.139406] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832863, 'name': CreateVM_Task, 'duration_secs': 0.574244} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.139614] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1213.140469] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.140674] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.141018] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1213.141320] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33795684-77c0-4369-91d5-cf999e4fe3d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.147127] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1213.147127] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]527ba827-efd9-a579-5057-5b0217987f85" [ 1213.147127] env[62914]: _type = "Task" [ 1213.147127] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.158956] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527ba827-efd9-a579-5057-5b0217987f85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.185389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.185389] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.186565] env[62914]: INFO nova.compute.claims [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1213.355470] env[62914]: DEBUG oslo_vmware.api [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Task: {'id': task-4832867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177156} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.355720] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1213.355874] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1213.356080] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1213.356274] env[62914]: INFO nova.compute.manager [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1213.356551] env[62914]: DEBUG oslo.service.loopingcall [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1213.356768] env[62914]: DEBUG nova.compute.manager [-] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1213.356863] env[62914]: DEBUG nova.network.neutron [-] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1213.379621] env[62914]: DEBUG oslo_concurrency.lockutils [req-72190631-7808-4716-8ef2-1735bebd9e3f req-f447d2db-099c-4bf5-aa01-c050fdbd6122 service nova] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.545710] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832868, 'name': Rename_Task, 'duration_secs': 0.148326} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.546075] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1213.546361] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ab9b0ea-dac7-4b2f-b360-8e0d53ba3448 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.554407] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1213.554407] env[62914]: value = "task-4832869" [ 1213.554407] env[62914]: _type = "Task" [ 1213.554407] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.563977] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832869, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.610485] env[62914]: DEBUG nova.compute.manager [req-929c4320-3cfb-4feb-8aaa-9e118e2da47a req-504959ee-a908-41b3-a987-591fc61ce3b1 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Received event network-vif-deleted-f752f060-cdfa-4b16-904d-9263dfa26442 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1213.610696] env[62914]: INFO nova.compute.manager [req-929c4320-3cfb-4feb-8aaa-9e118e2da47a req-504959ee-a908-41b3-a987-591fc61ce3b1 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Neutron deleted interface f752f060-cdfa-4b16-904d-9263dfa26442; detaching it from the instance and deleting it from the info cache [ 1213.610880] env[62914]: DEBUG nova.network.neutron [req-929c4320-3cfb-4feb-8aaa-9e118e2da47a req-504959ee-a908-41b3-a987-591fc61ce3b1 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.658640] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]527ba827-efd9-a579-5057-5b0217987f85, 'name': SearchDatastore_Task, 'duration_secs': 0.018871} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.658988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.659282] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1213.659543] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.659702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.659890] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1213.660201] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c287149c-9eb6-4d15-97d9-b9e3aa689273 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.669656] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1213.669854] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1213.670607] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2044d7b-0a04-4abe-8b6c-35b33c2acf09 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.676531] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1213.676531] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ad441-cce6-9bad-683d-551b82a1882f" [ 1213.676531] env[62914]: _type = "Task" [ 1213.676531] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.685040] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ad441-cce6-9bad-683d-551b82a1882f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.064959] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832869, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.082462] env[62914]: DEBUG nova.network.neutron [-] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.113476] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2efed25d-d5d6-4398-94ae-ea9f58a68878 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.123870] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d440062-fbd6-41b5-a647-ca1b71732b99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.158731] env[62914]: DEBUG nova.compute.manager [req-929c4320-3cfb-4feb-8aaa-9e118e2da47a req-504959ee-a908-41b3-a987-591fc61ce3b1 service nova] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Detach interface failed, port_id=f752f060-cdfa-4b16-904d-9263dfa26442, reason: Instance bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1214.187803] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523ad441-cce6-9bad-683d-551b82a1882f, 'name': SearchDatastore_Task, 'duration_secs': 0.041234} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.189034] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b20fa18d-f91f-497a-b8ab-33d561ba4cec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.198582] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1214.198582] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c663c-3d9e-f580-6af4-4ab6655fc151" [ 1214.198582] env[62914]: _type = "Task" [ 1214.198582] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.208509] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c663c-3d9e-f580-6af4-4ab6655fc151, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.321975] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07723e19-ae0b-428f-8f70-891883dae03b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.330346] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf39a42-9923-4981-bfb9-5af9956accfa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.361462] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a84de78-6ba8-4a8d-aa92-7d1e16afe8ff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.370104] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c1b99b-309e-455b-ba79-84cd4be0c785 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.383925] env[62914]: DEBUG nova.compute.provider_tree [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.567740] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832869, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.585625] env[62914]: INFO nova.compute.manager [-] [instance: bd973845-e7cf-4c5a-9a6b-3ae15ada9f64] Took 1.23 seconds to deallocate network for instance. [ 1214.712105] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]528c663c-3d9e-f580-6af4-4ab6655fc151, 'name': SearchDatastore_Task, 'duration_secs': 0.023129} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.712354] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.712622] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1214.712893] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a0c96f7-06ff-4590-8885-ab1591a2aabe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.720824] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1214.720824] env[62914]: value = "task-4832870" [ 1214.720824] env[62914]: _type = "Task" [ 1214.720824] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.729115] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832870, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.886849] env[62914]: DEBUG nova.scheduler.client.report [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.066033] env[62914]: DEBUG oslo_vmware.api [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832869, 'name': PowerOnVM_Task, 'duration_secs': 1.046887} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.066348] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1215.066606] env[62914]: INFO nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Took 8.00 seconds to spawn the instance on the hypervisor. [ 1215.066763] env[62914]: DEBUG nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1215.067586] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbe3e91-b6c8-400c-b05a-8e1218b39e7a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.092200] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.231651] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832870, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.391511] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.391938] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Start building networks asynchronously for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1215.394836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.303s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.395959] env[62914]: DEBUG nova.objects.instance [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lazy-loading 'resources' on Instance uuid bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.586425] env[62914]: INFO nova.compute.manager [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Took 13.54 seconds to build instance. [ 1215.731346] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832870, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.889926} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.731655] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1215.731891] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1215.732183] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c7bfdb8-8292-48b9-9d35-38a5e46c1dd6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.740740] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1215.740740] env[62914]: value = "task-4832871" [ 1215.740740] env[62914]: _type = "Task" [ 1215.740740] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.754154] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832871, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.899317] env[62914]: DEBUG nova.compute.utils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1215.905048] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1215.905351] env[62914]: DEBUG nova.network.neutron [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1215.964481] env[62914]: DEBUG nova.policy [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6739a790d54c98b39ff51cf254379c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd141c01c1d5848eea6ef2b831e431ba5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1216.033200] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34da675f-14ad-442a-9a08-c221126a7556 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.041528] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016f5089-2747-4cb6-b7ce-86c8f0e3a5a2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.075636] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f826d0-81d6-484d-a7fb-1b18c3bf613d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.083804] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53c1713-c49b-4588-a544-0c5fe85dc2e9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.088356] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2704ed9f-f99d-456f-b316-f84f50509844 tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.047s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.097997] env[62914]: DEBUG nova.compute.provider_tree [None 
req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.250416] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141922} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.250843] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1216.252089] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2deb5612-c1aa-4dbe-92fc-4465406fb770 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.275744] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1216.276083] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3f1660b-bd15-4663-af21-b10bd8ab39b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.299623] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.299890] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.300117] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.300301] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.300468] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.302315] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1216.302315] env[62914]: value = "task-4832872" [ 1216.302315] env[62914]: _type = "Task" [ 1216.302315] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.303228] env[62914]: DEBUG nova.network.neutron [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Successfully created port: d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1216.305533] env[62914]: INFO nova.compute.manager [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Terminating instance [ 1216.311040] env[62914]: DEBUG nova.compute.manager [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1216.311258] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1216.312029] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770bc809-9811-4798-8094-3ed2b1da0847 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.324038] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832872, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.326554] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1216.326867] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a9a20de-13a2-418d-863d-2af425f91085 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.332890] env[62914]: DEBUG oslo_vmware.api [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1216.332890] env[62914]: value = "task-4832873" [ 1216.332890] env[62914]: _type = "Task" [ 1216.332890] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.344230] env[62914]: DEBUG oslo_vmware.api [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.408913] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1216.601699] env[62914]: DEBUG nova.scheduler.client.report [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1216.815533] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832872, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.843646] env[62914]: DEBUG oslo_vmware.api [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832873, 'name': PowerOffVM_Task, 'duration_secs': 0.189823} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.843870] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1216.844083] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1216.844398] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9173427-463c-424c-ba80-2b9d856e0423 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.911479] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1216.911729] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1216.911914] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Deleting the datastore file [datastore2] e0019e52-b793-4db7-a2a3-4553c78ad6b9 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1216.912211] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a76ca2e1-2058-4794-ad76-6737a4c14abc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.915369] env[62914]: INFO nova.virt.block_device [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Booting with volume 6704424c-ebf8-4ade-9901-bcd14b7d5207 at /dev/sda [ 1216.927379] env[62914]: DEBUG oslo_vmware.api [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for the task: (returnval){ [ 1216.927379] env[62914]: value = "task-4832875" [ 1216.927379] env[62914]: _type = "Task" [ 1216.927379] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.933453] env[62914]: DEBUG oslo_vmware.api [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832875, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.958413] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bbc7f2eb-befe-44e0-b29b-e4727f0f5cdf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.968151] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd9ff8f-bf31-4dbf-b745-c5020c6fa289 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.000474] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb893826-eec3-4ed5-b6cd-faa6469ab721 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.010284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8761199-1b16-4644-87e5-07498d0384ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.046147] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42d12eb-207f-469d-bf00-438bbe73e8b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.054943] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5408d810-ba7e-427e-b119-26e7292a09a7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.072482] env[62914]: DEBUG nova.virt.block_device [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating existing volume attachment record: 41855a55-f2c5-4703-8079-cb6d33607e0c {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1217.077788] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.078040] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.107401] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 
tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.133655] env[62914]: INFO nova.scheduler.client.report [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Deleted allocations for instance bd973845-e7cf-4c5a-9a6b-3ae15ada9f64 [ 1217.317458] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832872, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.435299] env[62914]: DEBUG oslo_vmware.api [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Task: {'id': task-4832875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158357} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.435721] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1217.435963] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1217.436190] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1217.436378] env[62914]: INFO nova.compute.manager [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1217.436662] env[62914]: DEBUG oslo.service.loopingcall [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1217.436901] env[62914]: DEBUG nova.compute.manager [-] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1217.437008] env[62914]: DEBUG nova.network.neutron [-] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1217.581641] env[62914]: DEBUG nova.compute.utils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1217.642060] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e03a469f-b53a-4b1e-ab7c-7985d55102d7 tempest-ServerRescueNegativeTestJSON-298244947 tempest-ServerRescueNegativeTestJSON-298244947-project-member] Lock "bd973845-e7cf-4c5a-9a6b-3ae15ada9f64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.411s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.711214] env[62914]: DEBUG nova.compute.manager [req-17b0bcb0-1de5-49a1-89c6-9e233143ed70 req-3481c784-ca08-41cd-bcb9-108594be7514 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Received event network-vif-deleted-94196113-7172-43c5-82b1-7375a40b8516 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1217.711438] env[62914]: INFO nova.compute.manager [req-17b0bcb0-1de5-49a1-89c6-9e233143ed70 req-3481c784-ca08-41cd-bcb9-108594be7514 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Neutron deleted interface 94196113-7172-43c5-82b1-7375a40b8516; detaching it from the instance and deleting it from the info cache [ 1217.711630] env[62914]: DEBUG nova.network.neutron [req-17b0bcb0-1de5-49a1-89c6-9e233143ed70 req-3481c784-ca08-41cd-bcb9-108594be7514 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.816961] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832872, 'name': ReconfigVM_Task, 'duration_secs': 1.22327} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.817546] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.818212] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2a269a3-928e-4411-871f-e743cca7c9f0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.825744] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1217.825744] env[62914]: value = "task-4832876" [ 1217.825744] env[62914]: _type = "Task" [ 1217.825744] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.835332] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832876, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.882426] env[62914]: DEBUG nova.compute.manager [req-c8665a3e-7dbe-4acd-9c2f-65cd8948e44f req-4d6f83a2-a295-4faf-9902-46e878f5ce9d service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Received event network-vif-plugged-d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1217.882656] env[62914]: DEBUG oslo_concurrency.lockutils [req-c8665a3e-7dbe-4acd-9c2f-65cd8948e44f req-4d6f83a2-a295-4faf-9902-46e878f5ce9d service nova] Acquiring lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.882868] env[62914]: DEBUG oslo_concurrency.lockutils [req-c8665a3e-7dbe-4acd-9c2f-65cd8948e44f req-4d6f83a2-a295-4faf-9902-46e878f5ce9d service nova] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.883142] env[62914]: DEBUG oslo_concurrency.lockutils [req-c8665a3e-7dbe-4acd-9c2f-65cd8948e44f req-4d6f83a2-a295-4faf-9902-46e878f5ce9d service nova] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.883398] env[62914]: DEBUG nova.compute.manager [req-c8665a3e-7dbe-4acd-9c2f-65cd8948e44f req-4d6f83a2-a295-4faf-9902-46e878f5ce9d service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] No waiting events found dispatching 
network-vif-plugged-d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1217.883702] env[62914]: WARNING nova.compute.manager [req-c8665a3e-7dbe-4acd-9c2f-65cd8948e44f req-4d6f83a2-a295-4faf-9902-46e878f5ce9d service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Received unexpected event network-vif-plugged-d93b160a-3672-4d89-8d58-d29cca7cd2cf for instance with vm_state building and task_state block_device_mapping. [ 1218.052329] env[62914]: DEBUG nova.network.neutron [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Successfully updated port: d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1218.086270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.183034] env[62914]: DEBUG nova.network.neutron [-] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.216023] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2198d006-00e9-45de-a527-abbe664993b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.224627] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32d9dcd-d6c8-48f7-8ec0-ddeb7194be60 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.259552] env[62914]: DEBUG nova.compute.manager [req-17b0bcb0-1de5-49a1-89c6-9e233143ed70 req-3481c784-ca08-41cd-bcb9-108594be7514 service nova] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Detach interface failed, port_id=94196113-7172-43c5-82b1-7375a40b8516, reason: Instance e0019e52-b793-4db7-a2a3-4553c78ad6b9 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1218.336716] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832876, 'name': Rename_Task, 'duration_secs': 0.382948} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.336961] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1218.337284] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-298b5ced-72d4-4a6f-8624-12986cefb3e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.345248] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1218.345248] env[62914]: value = "task-4832877" [ 1218.345248] env[62914]: _type = "Task" [ 1218.345248] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.354595] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832877, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.553836] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1218.554079] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.554762] env[62914]: DEBUG nova.network.neutron [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1218.685749] env[62914]: INFO nova.compute.manager [-] [instance: e0019e52-b793-4db7-a2a3-4553c78ad6b9] Took 1.25 seconds to deallocate network for instance. [ 1218.860036] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832877, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.094054] env[62914]: DEBUG nova.network.neutron [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1219.166484] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1219.166805] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1219.167048] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1219.167196] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1219.167399] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1219.167580] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1219.167738] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1219.167948] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1219.168129] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1219.168302] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1219.168470] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1219.168648] env[62914]: DEBUG nova.virt.hardware [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1219.169301] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.169525] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.169749] env[62914]: INFO nova.compute.manager [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Attaching volume ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f to /dev/sdb [ 1219.172307] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f004be-a8d3-49ab-b5ac-29948716d28f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.183708] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e799a63-37cb-401e-a8ac-b400c657cf02 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.204264] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.204264] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e 
tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.204556] env[62914]: DEBUG nova.objects.instance [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lazy-loading 'resources' on Instance uuid e0019e52-b793-4db7-a2a3-4553c78ad6b9 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.218793] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da167bac-4d9f-42ee-901c-a0be1387dd5e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.227103] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c47b595-7913-4239-bfd0-2616cbe05e98 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.249334] env[62914]: DEBUG nova.virt.block_device [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Updating existing volume attachment record: 378891ce-81ed-404f-8b9d-986e877e62eb {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1219.356848] env[62914]: DEBUG oslo_vmware.api [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832877, 'name': PowerOnVM_Task, 'duration_secs': 0.790721} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.357110] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1219.357414] env[62914]: INFO nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Took 10.07 seconds to spawn the instance on the hypervisor. 
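
The PowerOnVM_Task entries above follow the polling pattern named in the log paths themselves: wait_for_task submits the vSphere task and _poll_task re-reads its state on a fixed interval, logging "progress is N%" until the task reports success or error. Below is a minimal, self-contained Python sketch of that loop for orientation only; it is not the oslo.vmware implementation, and get_task_info, the poll interval, and the toy task states are placeholders assumed for the example.

import time


def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll a vSphere-style task until it finishes, mirroring the
    # "Task: {...} progress is N%" cadence seen in the log above.
    while True:
        info = get_task_info()                     # stand-in for a PropertyCollector read
        state = info.get('state')
        if state == 'success':
            return info
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print('Task progress is %s%%.' % info.get('progress', 0))
        time.sleep(poll_interval)                  # fixed-interval polling


if __name__ == '__main__':
    # Toy task that reports 0%, 88%, then success, like task-4832877 above.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 88},
                   {'state': 'success', 'progress': 100}])
    wait_for_task(lambda: next(states), poll_interval=0.1)
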
[ 1219.357515] env[62914]: DEBUG nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1219.358350] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b193d6d-9386-4541-bb8e-26b8b5b18534 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.388694] env[62914]: DEBUG nova.network.neutron [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [{"id": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "address": "fa:16:3e:0c:6f:0b", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93b160a-36", "ovs_interfaceid": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.816958] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a288d3-74d8-4cf7-943a-5f5a9a45b9e4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.826285] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe934d2-b0e8-436f-a5e0-9b7ba795b3d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.861462] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cc7811-e384-4f41-b1ea-b41ed0455ca3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.876486] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50c90a5-8805-45a6-a0b3-35d28fb895f8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.884392] env[62914]: INFO nova.compute.manager [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Took 16.65 seconds to 
build instance. [ 1219.896871] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.897163] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance network_info: |[{"id": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "address": "fa:16:3e:0c:6f:0b", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93b160a-36", "ovs_interfaceid": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1219.898677] env[62914]: DEBUG nova.compute.provider_tree [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.899503] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:6f:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd93b160a-3672-4d89-8d58-d29cca7cd2cf', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1219.908226] env[62914]: DEBUG oslo.service.loopingcall [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1219.908329] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1219.909192] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc22b7a5-f1cd-4f68-8f5e-ac6c47b6b81f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.928949] env[62914]: DEBUG nova.compute.manager [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Received event network-changed-d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1219.929099] env[62914]: DEBUG nova.compute.manager [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Refreshing instance network info cache due to event network-changed-d93b160a-3672-4d89-8d58-d29cca7cd2cf. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1219.929349] env[62914]: DEBUG oslo_concurrency.lockutils [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] Acquiring lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.929538] env[62914]: DEBUG oslo_concurrency.lockutils [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] Acquired lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.929655] env[62914]: DEBUG nova.network.neutron [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Refreshing network info cache for port d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1219.937515] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1219.937515] env[62914]: value = "task-4832879" [ 1219.937515] env[62914]: _type = "Task" [ 1219.937515] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.949921] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832879, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.321111] env[62914]: DEBUG nova.compute.manager [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Received event network-changed-8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1220.321418] env[62914]: DEBUG nova.compute.manager [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Refreshing instance network info cache due to event network-changed-8022bac9-6ae9-47da-a35b-34baa22c828e. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1220.321718] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] Acquiring lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.322276] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.322276] env[62914]: DEBUG nova.network.neutron [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Refreshing network info cache for port 8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1220.397327] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2ae6647d-f362-4c43-ae73-ca693e087bfb tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.180s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.409931] env[62914]: DEBUG nova.scheduler.client.report [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1220.449433] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832879, 'name': CreateVM_Task, 'duration_secs': 0.299364} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.449594] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1220.450614] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': '41855a55-f2c5-4703-8079-cb6d33607e0c', 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942100', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'name': 'volume-6704424c-ebf8-4ade-9901-bcd14b7d5207', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d764dc0-133c-4d0d-a8e2-da82270252ec', 'attached_at': '', 'detached_at': '', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'serial': '6704424c-ebf8-4ade-9901-bcd14b7d5207'}, 'delete_on_termination': True, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62914) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1220.450614] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Root volume attach. Driver type: vmdk {{(pid=62914) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1220.451593] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1ebd58-bd7f-44fb-9a05-726908419e4e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.460686] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8497a777-f6ad-49a1-99ba-2bea6906e678 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.468476] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8d2622-6900-4198-a7b0-4679ceaf515a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.475730] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-d4e4296b-f1b5-46be-9d13-24e77a5b1d5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.484522] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1220.484522] env[62914]: value = "task-4832880" [ 1220.484522] env[62914]: _type = "Task" [ 1220.484522] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.497550] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.679803] env[62914]: DEBUG nova.network.neutron [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updated VIF entry in instance network info cache for port d93b160a-3672-4d89-8d58-d29cca7cd2cf. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1220.680294] env[62914]: DEBUG nova.network.neutron [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [{"id": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "address": "fa:16:3e:0c:6f:0b", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93b160a-36", "ovs_interfaceid": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.916030] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.943296] env[62914]: INFO nova.scheduler.client.report [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Deleted allocations for instance e0019e52-b793-4db7-a2a3-4553c78ad6b9 [ 1220.998619] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 34%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.084666] env[62914]: DEBUG nova.network.neutron [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updated VIF entry in instance network info cache for port 8022bac9-6ae9-47da-a35b-34baa22c828e. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1221.085140] env[62914]: DEBUG nova.network.neutron [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.183247] env[62914]: DEBUG oslo_concurrency.lockutils [req-37ca7cca-1188-4081-88ef-90690bf66ff4 req-55a0cc66-7da3-468c-9323-4a7f91d0436b service nova] Releasing lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.452979] env[62914]: DEBUG oslo_concurrency.lockutils [None req-afc08517-530d-4530-8991-8c20559b247e tempest-ServersNegativeTestMultiTenantJSON-2089542293 tempest-ServersNegativeTestMultiTenantJSON-2089542293-project-member] Lock "e0019e52-b793-4db7-a2a3-4553c78ad6b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.153s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.506758] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 45%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.588429] env[62914]: DEBUG oslo_concurrency.lockutils [req-d6578deb-8ac7-4fc5-8839-c387e01df59b req-13b547ff-dd3b-4465-8ac7-975c70f65998 service nova] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.002418] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 58%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.501829] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 71%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.005149] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 84%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.500936] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 97%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.798019] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1223.798019] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942105', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'name': 'volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cad97698-d68d-42de-a4de-772917e60374', 'attached_at': '', 'detached_at': '', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'serial': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1223.798019] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16de6b79-ee6c-464d-9911-38fa8f9ae559 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.814891] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1aa18b-3043-4f58-9059-210244fbee70 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.841519] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f/volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1223.842180] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de905bc3-ab48-4ab2-9e17-3f9011e6b805 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.862139] env[62914]: DEBUG oslo_vmware.api [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1223.862139] env[62914]: value = "task-4832882" [ 1223.862139] env[62914]: _type = "Task" [ 1223.862139] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.872535] env[62914]: DEBUG oslo_vmware.api [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832882, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.003582] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.372939] env[62914]: DEBUG oslo_vmware.api [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832882, 'name': ReconfigVM_Task, 'duration_secs': 0.373946} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.373305] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Reconfigured VM instance instance-00000076 to attach disk [datastore2] volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f/volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1224.378530] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd8bc639-0b5d-4b48-8ec4-85985c08f264 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.395934] env[62914]: DEBUG oslo_vmware.api [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1224.395934] env[62914]: value = "task-4832883" [ 1224.395934] env[62914]: _type = "Task" [ 1224.395934] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.405377] env[62914]: DEBUG oslo_vmware.api [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832883, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.502366] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task} progress is 98%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.906448] env[62914]: DEBUG oslo_vmware.api [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832883, 'name': ReconfigVM_Task, 'duration_secs': 0.146374} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.906997] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942105', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'name': 'volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cad97698-d68d-42de-a4de-772917e60374', 'attached_at': '', 'detached_at': '', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'serial': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1225.002024] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832880, 'name': RelocateVM_Task, 'duration_secs': 4.458061} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.002401] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Volume attach. Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1225.002637] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942100', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'name': 'volume-6704424c-ebf8-4ade-9901-bcd14b7d5207', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d764dc0-133c-4d0d-a8e2-da82270252ec', 'attached_at': '', 'detached_at': '', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'serial': '6704424c-ebf8-4ade-9901-bcd14b7d5207'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1225.003598] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806a9f1d-36eb-4ef2-8412-151c8546daaa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.022210] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88afb417-8a53-4143-977e-d75c1e13a113 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.046694] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-6704424c-ebf8-4ade-9901-bcd14b7d5207/volume-6704424c-ebf8-4ade-9901-bcd14b7d5207.vmdk or 
device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1225.047758] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0aa0f482-fd24-457b-87f0-389903c36d12 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.070110] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1225.070110] env[62914]: value = "task-4832884" [ 1225.070110] env[62914]: _type = "Task" [ 1225.070110] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.079254] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832884, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.581405] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832884, 'name': ReconfigVM_Task, 'duration_secs': 0.257863} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.581900] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-6704424c-ebf8-4ade-9901-bcd14b7d5207/volume-6704424c-ebf8-4ade-9901-bcd14b7d5207.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1225.587579] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c67eafab-34eb-48d8-93a4-c9eb3fa6e06f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.603401] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1225.603401] env[62914]: value = "task-4832885" [ 1225.603401] env[62914]: _type = "Task" [ 1225.603401] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.611917] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832885, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.949345] env[62914]: DEBUG nova.objects.instance [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'flavor' on Instance uuid cad97698-d68d-42de-a4de-772917e60374 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.113597] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832885, 'name': ReconfigVM_Task, 'duration_secs': 0.122231} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.113933] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942100', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'name': 'volume-6704424c-ebf8-4ade-9901-bcd14b7d5207', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d764dc0-133c-4d0d-a8e2-da82270252ec', 'attached_at': '', 'detached_at': '', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'serial': '6704424c-ebf8-4ade-9901-bcd14b7d5207'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1226.114527] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9acb133-dd70-454d-907f-ec917c992871 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.121789] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1226.121789] env[62914]: value = "task-4832886" [ 1226.121789] env[62914]: _type = "Task" [ 1226.121789] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.131073] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832886, 'name': Rename_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.455166] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e3eed60c-1dcb-4ce5-83aa-3f5a7fe80c57 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.285s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.631830] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832886, 'name': Rename_Task, 'duration_secs': 0.136841} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.632290] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1226.632423] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56d346da-667a-435a-ae69-38f4a399f2b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.639463] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1226.639463] env[62914]: value = "task-4832887" [ 1226.639463] env[62914]: _type = "Task" [ 1226.639463] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.648867] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.736318] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.736541] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.149688] env[62914]: DEBUG oslo_vmware.api [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832887, 'name': PowerOnVM_Task, 'duration_secs': 0.416272} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.149989] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1227.150221] env[62914]: INFO nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 7.98 seconds to spawn the instance on the hypervisor. 
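[annotation] The entries above trace the attach-then-boot sequence for instance 8d764dc0-133c-4d0d-a8e2-da82270252ec: each vCenter call (ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task handle that oslo.vmware's wait_for_task/_poll_task keeps polling, logging "progress is N%" until "completed successfully". The snippet below is only an illustrative sketch of that polling pattern, not Nova or oslo.vmware source; get_task_info, the TaskInfo fields, and the poll interval are assumptions made for the example.

```python
# Illustrative sketch only -- mimics the "Waiting for the task ...
# progress is N% ... completed successfully" polling seen above.
# `get_task_info` and the TaskInfo fields are assumed for this example.
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    state: str                   # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0            # percent complete, as logged above
    error: Optional[str] = None  # populated when state == 'error'


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a vCenter-style task until it reaches a terminal state."""
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            # Matches the "'duration_secs': ... completed successfully" lines.
            print(f"completed successfully in {time.monotonic() - start:.6f}s")
            return info
        if info.state == "error":
            raise TaskFailed(info.error or "task failed")
        # Still queued/running: report progress and poll again, like the
        # "progress is 5%" messages emitted between submissions above.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
```

In the log, every ReconfigVM_Task, Rename_Task and PowerOnVM_Task goes through a loop of this shape before the driver moves on to the next step of the spawn.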
[ 1227.150410] env[62914]: DEBUG nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1227.151261] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d41784-99c6-46a0-9e78-299ad64e10fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.239632] env[62914]: INFO nova.compute.manager [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Detaching volume ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f [ 1227.272159] env[62914]: INFO nova.virt.block_device [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Attempting to driver detach volume ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f from mountpoint /dev/sdb [ 1227.272439] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1227.272644] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942105', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'name': 'volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cad97698-d68d-42de-a4de-772917e60374', 'attached_at': '', 'detached_at': '', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'serial': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1227.273546] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9242cec4-0b14-4aac-8434-c3cd192784b8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.296046] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84356d88-940b-4061-9c74-2ab547327433 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.304056] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f341c8f3-b043-4204-aac0-4b91c528bba0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.325894] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0a8ad6-d9b0-4059-a497-39fc54454112 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.342451] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] The volume has not been displaced from its original location: [datastore2] volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f/volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1227.347996] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1227.348454] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98688878-cd91-41fb-8443-035245a5f49b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.367545] env[62914]: DEBUG oslo_vmware.api [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1227.367545] env[62914]: value = "task-4832888" [ 1227.367545] env[62914]: _type = "Task" [ 1227.367545] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.376540] env[62914]: DEBUG oslo_vmware.api [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832888, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.673147] env[62914]: INFO nova.compute.manager [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 14.51 seconds to build instance. [ 1227.880218] env[62914]: DEBUG oslo_vmware.api [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832888, 'name': ReconfigVM_Task, 'duration_secs': 0.208607} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.880516] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1227.885733] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-893bdb55-6c34-4f60-b29d-16447da9c17b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.902252] env[62914]: DEBUG oslo_vmware.api [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1227.902252] env[62914]: value = "task-4832889" [ 1227.902252] env[62914]: _type = "Task" [ 1227.902252] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.911066] env[62914]: DEBUG oslo_vmware.api [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.076811] env[62914]: DEBUG nova.compute.manager [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Received event network-changed-23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1228.076811] env[62914]: DEBUG nova.compute.manager [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Refreshing instance network info cache due to event network-changed-23732df6-58dd-4637-9c04-c25d6b049c91. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1228.076960] env[62914]: DEBUG oslo_concurrency.lockutils [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] Acquiring lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.077519] env[62914]: DEBUG oslo_concurrency.lockutils [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] Acquired lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.077780] env[62914]: DEBUG nova.network.neutron [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Refreshing network info cache for port 23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1228.175637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-b1221d92-e196-4ae9-853b-74efa16f5e48 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.018s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.413700] env[62914]: DEBUG oslo_vmware.api [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832889, 'name': ReconfigVM_Task, 'duration_secs': 0.159122} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.414585] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942105', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'name': 'volume-ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cad97698-d68d-42de-a4de-772917e60374', 'attached_at': '', 'detached_at': '', 'volume_id': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f', 'serial': 'ba4013ee-8cc5-4c2e-975c-0129f9cfcd7f'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1228.779897] env[62914]: DEBUG nova.network.neutron [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updated VIF entry in instance network info cache for port 23732df6-58dd-4637-9c04-c25d6b049c91. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1228.780331] env[62914]: DEBUG nova.network.neutron [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updating instance_info_cache with network_info: [{"id": "23732df6-58dd-4637-9c04-c25d6b049c91", "address": "fa:16:3e:a9:92:aa", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23732df6-58", "ovs_interfaceid": "23732df6-58dd-4637-9c04-c25d6b049c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.963200] env[62914]: DEBUG nova.objects.instance [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'flavor' on Instance uuid cad97698-d68d-42de-a4de-772917e60374 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.283491] env[62914]: DEBUG oslo_concurrency.lockutils [req-e0fada75-9cf9-47b4-9881-e8b70ff7e1ea req-8bf8f16f-0bff-493a-9228-fc1d49d64fd4 service nova] Releasing lock "refresh_cache-7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.504058] env[62914]: DEBUG nova.compute.manager [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1229.971506] env[62914]: DEBUG oslo_concurrency.lockutils [None req-e17d1b7e-1899-4966-a8fc-3da9bcbe8289 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.235s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.024323] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.024659] env[62914]: DEBUG 
oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.100633] env[62914]: DEBUG nova.compute.manager [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Received event network-changed-d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1230.100863] env[62914]: DEBUG nova.compute.manager [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Refreshing instance network info cache due to event network-changed-d93b160a-3672-4d89-8d58-d29cca7cd2cf. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1230.101130] env[62914]: DEBUG oslo_concurrency.lockutils [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] Acquiring lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.101309] env[62914]: DEBUG oslo_concurrency.lockutils [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] Acquired lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.101493] env[62914]: DEBUG nova.network.neutron [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Refreshing network info cache for port d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1230.529661] env[62914]: INFO nova.compute.claims [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1230.875531] env[62914]: DEBUG nova.network.neutron [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updated VIF entry in instance network info cache for port d93b160a-3672-4d89-8d58-d29cca7cd2cf. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1230.876044] env[62914]: DEBUG nova.network.neutron [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [{"id": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "address": "fa:16:3e:0c:6f:0b", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93b160a-36", "ovs_interfaceid": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.007183] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.007537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.007672] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "cad97698-d68d-42de-a4de-772917e60374-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.007834] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1231.008031] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.010855] env[62914]: INFO nova.compute.manager [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Terminating instance [ 1231.012669] env[62914]: DEBUG nova.compute.manager [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1231.012876] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1231.013808] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f108df8-f6ad-43e7-8187-d312b9d47125 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.021573] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1231.021814] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a71df133-338f-4260-9557-859e87f0a7e7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.029635] env[62914]: DEBUG oslo_vmware.api [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1231.029635] env[62914]: value = "task-4832890" [ 1231.029635] env[62914]: _type = "Task" [ 1231.029635] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.036352] env[62914]: INFO nova.compute.resource_tracker [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating resource usage from migration 4d40a855-6620-4691-8a20-cd42b9d11d49 [ 1231.042613] env[62914]: DEBUG oslo_concurrency.lockutils [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.042867] env[62914]: DEBUG oslo_concurrency.lockutils [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.044224] env[62914]: DEBUG oslo_vmware.api [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832890, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.058447] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Refreshing inventories for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1231.072961] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Updating ProviderTree inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1231.073218] env[62914]: DEBUG nova.compute.provider_tree [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1231.084749] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Refreshing aggregate associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, aggregates: None {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1231.105448] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Refreshing trait associations for resource provider f2f7a014-852b-4b37-9610-c5761f4b0175, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62914) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1231.217751] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0b7956-8e4f-47ae-b900-0ba323d29bb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.226729] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0b21f7-5cec-4105-9d0d-467f09451f4c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.259026] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966c4a99-8fc8-4e0a-976a-53333d27dba9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.267213] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e60130c-c1ba-4565-bc9b-6ea7c2fb2002 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.281154] env[62914]: DEBUG nova.compute.provider_tree [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.378556] env[62914]: DEBUG oslo_concurrency.lockutils [req-988a9370-83f4-4025-9d54-0d51648d73f8 req-dc76914d-02db-4987-881d-bb152b4be4ae service nova] Releasing lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.540271] env[62914]: DEBUG oslo_vmware.api [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832890, 'name': PowerOffVM_Task, 'duration_secs': 0.280643} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.540535] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1231.540716] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1231.540990] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-286812ac-5c8f-4ba8-a8ba-d8df30521eb5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.545560] env[62914]: INFO nova.compute.manager [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Detaching volume 4ff3c664-12a4-426a-aa86-04f6fb6e9e4a [ 1231.582305] env[62914]: INFO nova.virt.block_device [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Attempting to driver detach volume 4ff3c664-12a4-426a-aa86-04f6fb6e9e4a from mountpoint /dev/sdb [ 1231.582558] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Volume detach. 
Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1231.582746] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942097', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'name': 'volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74e7896c-8a1f-448d-a44b-e6febfff9000', 'attached_at': '', 'detached_at': '', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'serial': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1231.583698] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5774c32-1f57-47fc-9130-f24239546b75 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.608418] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24868f4-8d42-4901-bcce-3edf83c71dc3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.611193] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1231.611401] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1231.611589] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleting the datastore file [datastore2] cad97698-d68d-42de-a4de-772917e60374 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1231.611836] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afd6ea59-ef84-41a6-97cd-677b4bb2c63e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.619701] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3eae3ed-1c00-4827-8c37-4fcccbde37f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.622194] env[62914]: DEBUG oslo_vmware.api [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1231.622194] env[62914]: value = "task-4832892" [ 1231.622194] env[62914]: _type = "Task" [ 1231.622194] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.642059] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8124bdc-f3ee-4b05-9734-30f312793b42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.647445] env[62914]: DEBUG oslo_vmware.api [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.662190] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] The volume has not been displaced from its original location: [datastore2] volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a/volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a.vmdk. No consolidation needed. {{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1231.667431] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1231.667761] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b42bf807-d68e-44f7-a4f7-a2bdacde5e19 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.686265] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1231.686265] env[62914]: value = "task-4832893" [ 1231.686265] env[62914]: _type = "Task" [ 1231.686265] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.695681] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832893, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.784197] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1232.132368] env[62914]: DEBUG oslo_vmware.api [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135161} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.132738] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1232.132783] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1232.132966] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1232.133168] env[62914]: INFO nova.compute.manager [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: cad97698-d68d-42de-a4de-772917e60374] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1232.133441] env[62914]: DEBUG oslo.service.loopingcall [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1232.133645] env[62914]: DEBUG nova.compute.manager [-] [instance: cad97698-d68d-42de-a4de-772917e60374] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1232.133745] env[62914]: DEBUG nova.network.neutron [-] [instance: cad97698-d68d-42de-a4de-772917e60374] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1232.197088] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832893, 'name': ReconfigVM_Task, 'duration_secs': 0.261445} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.197425] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1232.202437] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dff50f6-a386-4228-b1c4-c6e4313c5807 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.219659] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1232.219659] env[62914]: value = "task-4832894" [ 1232.219659] env[62914]: _type = "Task" [ 1232.219659] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.228848] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832894, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.289228] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.264s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.289433] env[62914]: INFO nova.compute.manager [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Migrating [ 1232.603721] env[62914]: DEBUG nova.compute.manager [req-434cd93b-48d5-48d3-aff8-38fa9112c855 req-a017bdff-eec7-4a6f-ac68-44141c026ec3 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Received event network-vif-deleted-38dca680-426a-4e56-834d-e95b4f9d439f {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1232.603922] env[62914]: INFO nova.compute.manager [req-434cd93b-48d5-48d3-aff8-38fa9112c855 req-a017bdff-eec7-4a6f-ac68-44141c026ec3 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Neutron deleted interface 38dca680-426a-4e56-834d-e95b4f9d439f; detaching it from the instance and deleting it from the info cache [ 1232.604119] env[62914]: DEBUG nova.network.neutron [req-434cd93b-48d5-48d3-aff8-38fa9112c855 req-a017bdff-eec7-4a6f-ac68-44141c026ec3 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.731073] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832894, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.806910] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.807106] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquired lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.807753] env[62914]: DEBUG nova.network.neutron [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1233.074418] env[62914]: DEBUG nova.network.neutron [-] [instance: cad97698-d68d-42de-a4de-772917e60374] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.106501] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6e49272-34f6-427e-a53e-17b197c41ca8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.117292] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dba772f-3f39-4e02-845d-3cee852a5122 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.150159] env[62914]: DEBUG nova.compute.manager [req-434cd93b-48d5-48d3-aff8-38fa9112c855 req-a017bdff-eec7-4a6f-ac68-44141c026ec3 service nova] [instance: cad97698-d68d-42de-a4de-772917e60374] Detach interface failed, port_id=38dca680-426a-4e56-834d-e95b4f9d439f, reason: Instance cad97698-d68d-42de-a4de-772917e60374 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1233.231060] env[62914]: DEBUG oslo_vmware.api [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832894, 'name': ReconfigVM_Task, 'duration_secs': 0.536066} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.231399] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942097', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'name': 'volume-4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74e7896c-8a1f-448d-a44b-e6febfff9000', 'attached_at': '', 'detached_at': '', 'volume_id': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a', 'serial': '4ff3c664-12a4-426a-aa86-04f6fb6e9e4a'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1233.534992] env[62914]: DEBUG nova.network.neutron [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [{"id": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "address": "fa:16:3e:0c:6f:0b", "network": {"id": "dbcfa84b-ba0f-435c-a656-d66a0b3944d2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1907072947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d141c01c1d5848eea6ef2b831e431ba5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93b160a-36", "ovs_interfaceid": "d93b160a-3672-4d89-8d58-d29cca7cd2cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.576848] env[62914]: INFO nova.compute.manager [-] [instance: cad97698-d68d-42de-a4de-772917e60374] Took 1.44 seconds to deallocate network for instance. 
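
Editor's note: the ReconfigVM_Task / PowerOffVM_Task entries throughout this section (task-4832894 onwards) all follow the same oslo.vmware pattern: invoke a vSphere task method, then poll it via wait_for_task until it reports completion. A minimal sketch of that pattern is below; it assumes an already-established VMwareAPISession (as created at driver startup earlier in this log) and the usual session.invoke_api(vim, method, managed_object) calling convention, and is illustrative rather than Nova's actual vm_util code.

    def power_off_and_wait(session, vm_ref):
        # Produces the "Invoking VirtualMachine.PowerOffVM_Task" request_handler
        # lines seen in this log.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() is the loop behind the "_poll_task ... progress is 0%"
        # lines: it polls the task until vCenter reports success and returns the
        # task info, raising if the task ends in an error state.
        return session.wait_for_task(task)
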
[ 1233.777305] env[62914]: DEBUG nova.objects.instance [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.038687] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Releasing lock "refresh_cache-8d764dc0-133c-4d0d-a8e2-da82270252ec" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.083988] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.084298] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.084638] env[62914]: DEBUG nova.objects.instance [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'resources' on Instance uuid cad97698-d68d-42de-a4de-772917e60374 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.568442] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.693354] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d85df1a-019c-4a25-883a-961fdae4735c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.701564] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a776884b-7815-46c3-a030-fb3dec57a586 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.735266] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5b395a-5720-40e3-809f-d5c0fe398151 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.743710] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba582e98-876d-448c-a407-321a12d33b00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.759092] env[62914]: DEBUG nova.compute.provider_tree [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Updating inventory in 
ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1234.785916] env[62914]: DEBUG oslo_concurrency.lockutils [None req-06ecb4e2-6a7c-4b51-9e9a-aa54eb3de003 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.743s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.294176] env[62914]: DEBUG nova.scheduler.client.report [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Updated inventory for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1235.294478] env[62914]: DEBUG nova.compute.provider_tree [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Updating resource provider f2f7a014-852b-4b37-9610-c5761f4b0175 generation from 163 to 164 during operation: update_inventory {{(pid=62914) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1235.294722] env[62914]: DEBUG nova.compute.provider_tree [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Updating inventory in ProviderTree for provider f2f7a014-852b-4b37-9610-c5761f4b0175 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1235.330326] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.330553] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 
tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.330742] env[62914]: DEBUG nova.compute.manager [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1235.331696] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef1b370-f55b-4ea3-bb56-96ec7f7e3da1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.339429] env[62914]: DEBUG nova.compute.manager [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1235.340061] env[62914]: DEBUG nova.objects.instance [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.554283] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dfd5f1-e155-46a9-bc37-8b16ee570840 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.573395] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.573775] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 1235.575055] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance '8d764dc0-133c-4d0d-a8e2-da82270252ec' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1235.800391] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.821821] env[62914]: INFO nova.scheduler.client.report [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 
tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted allocations for instance cad97698-d68d-42de-a4de-772917e60374 [ 1235.845411] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1235.845694] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83ccf01c-5d6c-4447-8781-0c46f8f17760 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.853973] env[62914]: DEBUG oslo_vmware.api [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1235.853973] env[62914]: value = "task-4832895" [ 1235.853973] env[62914]: _type = "Task" [ 1235.853973] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.864646] env[62914]: DEBUG oslo_vmware.api [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832895, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.082826] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1236.083415] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbd683aa-2dd8-4b67-9c51-3c6c7a9624b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.092623] env[62914]: DEBUG oslo_vmware.api [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1236.092623] env[62914]: value = "task-4832896" [ 1236.092623] env[62914]: _type = "Task" [ 1236.092623] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.106363] env[62914]: DEBUG oslo_vmware.api [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832896, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.331112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-28947353-70ab-44d2-a930-dda8c53b5e12 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "cad97698-d68d-42de-a4de-772917e60374" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.324s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.369520] env[62914]: DEBUG oslo_vmware.api [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832895, 'name': PowerOffVM_Task, 'duration_secs': 0.203577} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.370421] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1236.370786] env[62914]: DEBUG nova.compute.manager [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1236.372069] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528452f6-7749-4324-a706-260bf7a76044 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.603656] env[62914]: DEBUG oslo_vmware.api [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832896, 'name': PowerOffVM_Task, 'duration_secs': 0.20726} completed successfully. 
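
Editor's note: the recurring "Lock 'compute_resources' acquired by ... :: waited 0.000s" / "released ... :: held 2.264s" pairs come from oslo.concurrency's synchronized wrapper, and the "Acquiring/Acquired/Releasing lock refresh_cache-..." lines from its lock() context manager used directly. A hedged stand-in for the decorator form is sketched below; it is not Nova's ResourceTracker code, just the same locking idiom with a toy usage dict.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(usage, instance_uuid, vcpus, memory_mb):
        # Everything in here runs with the named semaphore held, so concurrent
        # claims, resize claims and usage updates on this host are serialized;
        # the waited/held durations in the log measure exactly this critical
        # section.
        usage[instance_uuid] = {'vcpus': vcpus, 'memory_mb': memory_mb}
        return usage[instance_uuid]
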
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.604151] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1236.604151] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance '8d764dc0-133c-4d0d-a8e2-da82270252ec' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1236.887234] env[62914]: DEBUG oslo_concurrency.lockutils [None req-de71de91-b58f-4089-b630-8d643202bf93 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.556s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.110051] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1237.110279] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1237.110449] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.110642] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1237.110797] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.110955] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d 
tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1237.111192] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1237.111374] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1237.111526] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1237.111703] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1237.111886] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1237.117635] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2d8337b-5826-4aba-b1e1-faf2b8a690fb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.138269] env[62914]: DEBUG oslo_vmware.api [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1237.138269] env[62914]: value = "task-4832897" [ 1237.138269] env[62914]: _type = "Task" [ 1237.138269] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.146298] env[62914]: DEBUG oslo_vmware.api [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832897, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.647348] env[62914]: DEBUG oslo_vmware.api [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832897, 'name': ReconfigVM_Task, 'duration_secs': 0.165795} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.647680] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance '8d764dc0-133c-4d0d-a8e2-da82270252ec' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1237.811916] env[62914]: DEBUG nova.objects.instance [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1238.155027] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1238.155284] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1238.155453] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.155712] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1238.155811] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.155966] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1238.156195] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1238.156361] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1238.156542] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1238.156708] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1238.156903] env[62914]: DEBUG nova.virt.hardware [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1238.317536] env[62914]: DEBUG oslo_concurrency.lockutils [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.317730] env[62914]: DEBUG oslo_concurrency.lockutils [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.317913] env[62914]: DEBUG nova.network.neutron [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1238.318530] env[62914]: DEBUG nova.objects.instance [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'info_cache' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1238.329882] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.329882] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.666338] env[62914]: ERROR nova.compute.manager [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Setting instance vm_state to ERROR: AttributeError: 'NoneType' object has no attribute 'key' [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Traceback (most recent call last): [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] File "/opt/stack/nova/nova/compute/manager.py", line 10941, in _error_out_instance_on_exception [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] yield [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] File "/opt/stack/nova/nova/compute/manager.py", line 6151, in _resize_instance [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] disk_info = self.driver.migrate_disk_and_power_off( [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] return self._vmops.migrate_disk_and_power_off(context, instance, [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] disk_key = device.key [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] AttributeError: 'NoneType' object has no attribute 'key' [ 1238.666338] env[62914]: ERROR nova.compute.manager [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] [ 1238.821964] env[62914]: DEBUG nova.objects.base [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Object Instance<74e7896c-8a1f-448d-a44b-e6febfff9000> lazy-loaded attributes: flavor,info_cache {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 
1238.832276] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1239.188928] env[62914]: INFO nova.compute.manager [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Swapping old allocation on dict_keys(['f2f7a014-852b-4b37-9610-c5761f4b0175']) held by migration 4d40a855-6620-4691-8a20-cd42b9d11d49 for instance [ 1239.213930] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Overwriting current allocation {'allocations': {'f2f7a014-852b-4b37-9610-c5761f4b0175': {'resources': {'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 165}}, 'project_id': 'd141c01c1d5848eea6ef2b831e431ba5', 'user_id': 'ad6739a790d54c98b39ff51cf254379c', 'consumer_generation': 1} on consumer 8d764dc0-133c-4d0d-a8e2-da82270252ec {{(pid=62914) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1239.352778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.352778] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.354329] env[62914]: INFO nova.compute.claims [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1239.582248] env[62914]: DEBUG nova.network.neutron [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [{"id": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "address": "fa:16:3e:5e:12:de", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e05c7fc-1e", "ovs_interfaceid": "5e05c7fc-1efe-4e76-b521-ac8bcee07403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.086071] env[62914]: DEBUG oslo_concurrency.lockutils [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "refresh_cache-74e7896c-8a1f-448d-a44b-e6febfff9000" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.440137] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5743da-1826-427e-96f4-d2e55b2647c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.448153] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6599d1-e059-4ba9-9d9e-bc0657765681 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.478893] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dde15b-238a-499c-beb1-d63257fb4fd9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.486724] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d580af-04e8-428d-8d40-f7c5581fa9a1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.490748] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.490987] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.491211] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.491395] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.491566] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.493667] env[62914]: INFO nova.compute.manager [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Terminating instance [ 1240.495773] env[62914]: DEBUG nova.compute.manager [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1240.495993] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1240.503695] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54a2d5c6-4995-4040-99bf-41231d7891e2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.505662] env[62914]: DEBUG nova.compute.provider_tree [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.511887] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1240.511887] env[62914]: value = "task-4832898" [ 1240.511887] env[62914]: _type = "Task" [ 1240.511887] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.520432] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832898, 'name': PowerOffVM_Task} progress is 0%. 
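
Editor's note: the resize of instance 8d764dc0-133c-4d0d-a8e2-da82270252ec above ended in ERROR with "AttributeError: 'NoneType' object has no attribute 'key'" because _resize_disk passed vmdk.device into detach_disk_from_vm while it was None, apparently since this volume-backed instance has no local root VMDK. The sketch below is only an illustrative defensive guard under that reading of the traceback, not the upstream fix; raising DiskNotFound here is the editor's assumption.

    from nova import exception

    def detach_root_disk_checked(volumeops, vm_ref, instance, vmdk):
        # Guard the case seen in the traceback: vmdk.device is None, so
        # detach_disk_from_vm() would crash on `device.key`.
        if vmdk.device is None:
            raise exception.DiskNotFound(location=vmdk.path)
        volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device)
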
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.589744] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1240.589744] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd027d15-1283-4b29-957e-e1aad65bc9ca {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.596761] env[62914]: DEBUG oslo_vmware.api [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1240.596761] env[62914]: value = "task-4832899" [ 1240.596761] env[62914]: _type = "Task" [ 1240.596761] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.604707] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Didn't find any instances for network info cache update. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10090}} [ 1240.604951] env[62914]: DEBUG oslo_vmware.api [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.605211] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.605387] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.605544] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.605692] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... 
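
Editor's note: the inventory the resource tracker keeps reporting for provider f2f7a014-852b-4b37-9610-c5761f4b0175 (above, and unchanged again just below) determines the capacity Placement schedules against as (total - reserved) * allocation_ratio per resource class. A small worked check of those figures, using the values exactly as logged:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, effective)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
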
{{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 1240.605893] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.774437] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.009081] env[62914]: DEBUG nova.scheduler.client.report [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1241.024925] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] VM already powered off {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1241.025278] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Volume detach. 
Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1241.025463] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942100', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'name': 'volume-6704424c-ebf8-4ade-9901-bcd14b7d5207', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d764dc0-133c-4d0d-a8e2-da82270252ec', 'attached_at': '', 'detached_at': '', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'serial': '6704424c-ebf8-4ade-9901-bcd14b7d5207'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1241.026360] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d3aba9-23a8-4aba-80b4-9ea1612d8a2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.046937] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb0018f-5368-412f-aa4b-54be727cad45 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.054269] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d4b878-2dae-4738-9701-127d405f096a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.073013] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5189f4a8-7ae8-4597-9c50-43f5f5d12347 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.090609] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] The volume has not been displaced from its original location: [datastore2] volume-6704424c-ebf8-4ade-9901-bcd14b7d5207/volume-6704424c-ebf8-4ade-9901-bcd14b7d5207.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1241.095873] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1241.096258] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-227130f1-f913-4f80-b222-01618696f563 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.110220] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.119828] env[62914]: DEBUG oslo_vmware.api [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832899, 'name': PowerOnVM_Task, 'duration_secs': 0.383954} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.120245] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1241.120454] env[62914]: DEBUG nova.compute.manager [None req-85282a10-d7b3-4bb4-b363-81bde1290f65 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1241.122829] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e119fec3-5674-44bd-a021-12590c08f95d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.125876] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1241.125876] env[62914]: value = "task-4832900" [ 1241.125876] env[62914]: _type = "Task" [ 1241.125876] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.141032] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832900, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.519053] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.166s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.519372] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1241.522189] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.748s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.618398] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba9bd06-3672-4ac7-a407-91b4ac3c38c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.626774] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be09b74-08cb-4231-825c-2acd0ba59bde {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.664238] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832900, 'name': ReconfigVM_Task, 'duration_secs': 0.199385} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.665882] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.671137] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d06134-17d0-43df-8853-2158c577608c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.674148] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bced8eb6-9e4f-4f48-bb67-834257659136 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.694670] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a12e086-f415-4057-ad8c-23f9509397a3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.698684] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1241.698684] env[62914]: value = "task-4832901" [ 1241.698684] env[62914]: _type = "Task" [ 1241.698684] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.710791] env[62914]: DEBUG nova.compute.provider_tree [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.717724] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832901, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.793881] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.027675] env[62914]: DEBUG nova.compute.utils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1242.029493] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1242.029671] env[62914]: DEBUG nova.network.neutron [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1242.076651] env[62914]: DEBUG nova.policy [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ea29d6698d4734a5def35fe065fe21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b59bf6daf8c246f7b034dc0adcfc8cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1242.210310] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832901, 'name': ReconfigVM_Task, 'duration_secs': 0.114231} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.210828] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942100', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'name': 'volume-6704424c-ebf8-4ade-9901-bcd14b7d5207', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d764dc0-133c-4d0d-a8e2-da82270252ec', 'attached_at': '', 'detached_at': '', 'volume_id': '6704424c-ebf8-4ade-9901-bcd14b7d5207', 'serial': '6704424c-ebf8-4ade-9901-bcd14b7d5207'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1242.211091] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1242.211862] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72752b7e-6f04-4e67-b375-088a2b126b00 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.215465] env[62914]: DEBUG nova.scheduler.client.report [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1242.223728] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1242.224006] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e53091d-5549-41dd-b3b4-8e1ad876ec34 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.289327] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1242.289584] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 
8d764dc0-133c-4d0d-a8e2-da82270252ec] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1242.289743] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore2] 8d764dc0-133c-4d0d-a8e2-da82270252ec {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1242.290041] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61b530f1-f030-4d9a-8ca8-711048026ea2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.296782] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1242.296782] env[62914]: value = "task-4832903" [ 1242.296782] env[62914]: _type = "Task" [ 1242.296782] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.305778] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.374153] env[62914]: DEBUG nova.network.neutron [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Successfully created port: 1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1242.532432] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1242.720210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.198s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.720513] env[62914]: INFO nova.compute.manager [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Successfully reverted task state from resize_migrating on failure for instance. 
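The "Successfully reverted task state from resize_migrating on failure" message above and the oslo_messaging.rpc.server traceback just below record the same failure: during the resize of instance 8d764dc0-133c-4d0d-a8e2-da82270252ec, detach_disk_from_vm() read device.key while the VMDK device lookup had returned None. The following is a minimal, self-contained sketch of that failure mode and of the kind of guard that would surface a clearer error; it is purely illustrative, and FakeDevice / detach_disk are hypothetical names, not Nova's API.

```python
# Illustrative sketch only -- not Nova's code. It reproduces the failure mode from the
# traceback below (reading .key on a device that was never found) and shows a guard
# that turns it into a clearer error. FakeDevice and detach_disk are hypothetical names.

class FakeDevice:
    """Stand-in for a vSphere virtual-disk device object."""
    def __init__(self, key):
        self.key = key


def detach_disk(device):
    # Without this check, device=None raises:
    #   AttributeError: 'NoneType' object has no attribute 'key'
    if device is None:
        raise ValueError("no virtual disk device found on the VM; cannot detach")
    return device.key


if __name__ == "__main__":
    print(detach_disk(FakeDevice(key=2000)))  # -> 2000 (a disk key like the one in the log)
    try:
        detach_disk(None)                     # the situation hit during the resize
    except ValueError as exc:
        print(f"refused cleanly: {exc}")
```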
[ 1242.728419] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.618s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.728605] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.728764] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1242.729645] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bb6087-a815-418e-b0fd-17c29f8903e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server [None req-39fa40f6-91a3-4a26-b63e-c753a2174b9d tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Exception during message handling: AttributeError: 'NoneType' object has no attribute 'key' [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6117, in resize_instance [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6114, in resize_instance [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6151, in _resize_instance [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1242.733331] env[62914]: ERROR 
oslo_messaging.rpc.server return self._vmops.migrate_disk_and_power_off(context, instance, [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server disk_key = device.key [ 1242.733331] env[62914]: ERROR oslo_messaging.rpc.server AttributeError: 'NoneType' object has no attribute 'key' [ 1242.735438] env[62914]: ERROR oslo_messaging.rpc.server [ 1242.739477] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd98607b-f401-4660-9512-98e7fd9c284c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.755732] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06890588-5ba8-481b-acc8-9e20c5c501b2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.762987] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cbfa8a-50e2-4f53-b38f-5fd20a1c9bfd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.793797] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179281MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1242.794025] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.794167] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.805849] env[62914]: DEBUG oslo_vmware.api [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089047} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.806150] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1242.806351] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1242.806539] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1242.806720] env[62914]: INFO nova.compute.manager [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 2.31 seconds to destroy the instance on the hypervisor. [ 1242.806978] env[62914]: DEBUG oslo.service.loopingcall [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1242.807198] env[62914]: DEBUG nova.compute.manager [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1242.807295] env[62914]: DEBUG nova.network.neutron [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1243.133731] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.134086] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.134351] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.134591] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.134844] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.137623] env[62914]: INFO nova.compute.manager [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Terminating instance [ 1243.139636] env[62914]: DEBUG nova.compute.manager [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1243.139885] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1243.140806] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f327cc1c-affa-4a2e-90ff-1d594e83e78f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.149208] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1243.149476] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf0281c9-b7d5-46bf-8e79-b97ac4a7d18b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.157093] env[62914]: DEBUG oslo_vmware.api [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1243.157093] env[62914]: value = "task-4832904" [ 1243.157093] env[62914]: _type = "Task" [ 1243.157093] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.166787] env[62914]: DEBUG oslo_vmware.api [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.364660] env[62914]: DEBUG nova.compute.manager [req-bd2d9bdf-4743-4ae5-a5e2-0c685c5411a8 req-9fe71db0-5435-42e3-a264-c3095e8171dd service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Received event network-vif-deleted-d93b160a-3672-4d89-8d58-d29cca7cd2cf {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1243.364939] env[62914]: INFO nova.compute.manager [req-bd2d9bdf-4743-4ae5-a5e2-0c685c5411a8 req-9fe71db0-5435-42e3-a264-c3095e8171dd service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Neutron deleted interface d93b160a-3672-4d89-8d58-d29cca7cd2cf; detaching it from the instance and deleting it from the info cache [ 1243.365116] env[62914]: DEBUG nova.network.neutron [req-bd2d9bdf-4743-4ae5-a5e2-0c685c5411a8 req-9fe71db0-5435-42e3-a264-c3095e8171dd service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.543630] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Start spawning the instance on the hypervisor. 
{{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1243.572569] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1243.572837] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1243.572996] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1243.573206] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1243.573361] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1243.573517] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1243.573733] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1243.573899] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1243.574083] env[62914]: DEBUG 
nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1243.574256] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1243.574435] env[62914]: DEBUG nova.virt.hardware [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1243.575362] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55411613-8542-4ec6-84d3-d3a09c28f458 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.585791] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dc150d-6549-4ade-84b6-e65d35b991d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.668060] env[62914]: DEBUG oslo_vmware.api [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832904, 'name': PowerOffVM_Task, 'duration_secs': 0.188209} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.668388] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1243.668565] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1243.668829] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c217f402-d7e0-4420-b433-92b286bcc3d8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.736059] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1243.736334] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1243.736538] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore2] 4b76e4eb-5d56-4eb0-82fc-47661dbc7239 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1243.736801] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-faa86a0e-8161-495d-9aa3-f938393b7008 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.744046] env[62914]: DEBUG oslo_vmware.api [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1243.744046] env[62914]: value = "task-4832906" [ 1243.744046] env[62914]: _type = "Task" [ 1243.744046] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.754382] env[62914]: DEBUG oslo_vmware.api [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.804429] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Applying migration context for instance 8d764dc0-133c-4d0d-a8e2-da82270252ec as it has an incoming, in-progress migration 4d40a855-6620-4691-8a20-cd42b9d11d49. Migration status is error {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1243.805523] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=62914) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1566}} [ 1243.823620] env[62914]: DEBUG nova.network.neutron [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.840032] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1243.840032] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 74e7896c-8a1f-448d-a44b-e6febfff9000 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1243.840032] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 4b76e4eb-5d56-4eb0-82fc-47661dbc7239 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1243.840032] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 9c98dff8-29b4-4f01-b110-6a89c5276d7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1243.840032] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance f35f819b-276a-4dc4-9cda-9b9a02d16c02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1243.840350] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance 8d764dc0-133c-4d0d-a8e2-da82270252ec actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1243.840388] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1243.840527] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=6 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '6', 'num_vm_active': '4', 'num_task_None': '3', 'num_os_type_None': '6', 'num_proj_d141c01c1d5848eea6ef2b831e431ba5': '3', 'io_workload': '1', 'num_proj_1780142384594b1dabc6811b54144d56': '1', 'num_task_deleting': '2', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '1', 'num_vm_error': '1', 'num_vm_building': '1', 'num_task_block_device_mapping': '1', 'num_proj_b59bf6daf8c246f7b034dc0adcfc8cde': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1243.868636] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7caf4992-6cbc-422b-b34e-d9f5f93ccce7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.883046] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe18f94-b460-4b78-b88b-cf6b6860e5ed {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.920936] env[62914]: DEBUG nova.compute.manager [req-bd2d9bdf-4743-4ae5-a5e2-0c685c5411a8 req-9fe71db0-5435-42e3-a264-c3095e8171dd service nova] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Detach interface failed, port_id=d93b160a-3672-4d89-8d58-d29cca7cd2cf, reason: Instance 8d764dc0-133c-4d0d-a8e2-da82270252ec could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1243.926157] env[62914]: DEBUG nova.compute.manager [req-7c92039c-e4c9-422f-900c-901ae2ef6df7 req-db1c5494-5853-46f7-91ce-93edb0fd215c service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Received event network-vif-plugged-1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1243.926457] env[62914]: DEBUG oslo_concurrency.lockutils [req-7c92039c-e4c9-422f-900c-901ae2ef6df7 req-db1c5494-5853-46f7-91ce-93edb0fd215c service nova] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.926734] env[62914]: DEBUG oslo_concurrency.lockutils [req-7c92039c-e4c9-422f-900c-901ae2ef6df7 req-db1c5494-5853-46f7-91ce-93edb0fd215c service nova] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.926939] env[62914]: DEBUG oslo_concurrency.lockutils [req-7c92039c-e4c9-422f-900c-901ae2ef6df7 req-db1c5494-5853-46f7-91ce-93edb0fd215c service nova] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.927140] env[62914]: DEBUG nova.compute.manager [req-7c92039c-e4c9-422f-900c-901ae2ef6df7 req-db1c5494-5853-46f7-91ce-93edb0fd215c service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] No waiting events found dispatching network-vif-plugged-1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1243.927320] env[62914]: WARNING nova.compute.manager [req-7c92039c-e4c9-422f-900c-901ae2ef6df7 req-db1c5494-5853-46f7-91ce-93edb0fd215c service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Received unexpected event network-vif-plugged-1555b103-122e-466e-838b-ce49a0c203ae for instance with vm_state building and task_state spawning. 
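The "No waiting events found" / "Received unexpected event" pair just above reflects a race: Neutron's network-vif-plugged-1555b103-… notification reached the compute manager before the spawn path had registered a waiter for it. The following is a small, self-contained sketch of that register-then-dispatch pattern; it is an illustration only, not Nova's implementation, and prepare_for_event, deliver_event, and _waiters are hypothetical names.

```python
# Illustrative sketch only -- not Nova's implementation. An external event that arrives
# before anyone has registered a waiter has nothing to wake, so it is reported as
# "unexpected"; once a waiter exists, the same event completes the wait.

import threading

_waiters = {}                 # event name -> threading.Event
_lock = threading.Lock()


def prepare_for_event(name):
    """Register interest in an event before triggering the action that causes it."""
    with _lock:
        _waiters[name] = threading.Event()


def deliver_event(name):
    """Dispatch an incoming event to its waiter, or report it as unexpected."""
    with _lock:
        waiter = _waiters.pop(name, None)
    if waiter is None:
        print(f"WARNING: received unexpected event {name}")  # analogous to the log above
    else:
        waiter.set()


deliver_event("network-vif-plugged-1555b103")    # arrives early: no waiter yet
prepare_for_event("network-vif-plugged-1555b103")
deliver_event("network-vif-plugged-1555b103")    # now there is a waiter to wake
```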
[ 1243.969798] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee9828f-706e-444c-b337-c39de4f8329a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.978734] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bc3cce-f36f-45a0-abeb-09e6d59727db {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.012027] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a5499f-8b50-4b67-b37f-54cf10db9e3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.020579] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30050970-c36e-47a2-9241-b057638aebf3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.048022] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.254201] env[62914]: DEBUG oslo_vmware.api [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169746} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.254485] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.254731] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1244.254952] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1244.255157] env[62914]: INFO nova.compute.manager [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1244.255403] env[62914]: DEBUG oslo.service.loopingcall [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1244.255631] env[62914]: DEBUG nova.compute.manager [-] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1244.255743] env[62914]: DEBUG nova.network.neutron [-] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1244.326612] env[62914]: INFO nova.compute.manager [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 1.52 seconds to deallocate network for instance. [ 1244.418910] env[62914]: DEBUG nova.network.neutron [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Successfully updated port: 1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1244.448883] env[62914]: DEBUG nova.compute.manager [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Received event network-changed-1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1244.449269] env[62914]: DEBUG nova.compute.manager [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Refreshing instance network info cache due to event network-changed-1555b103-122e-466e-838b-ce49a0c203ae. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1244.449640] env[62914]: DEBUG oslo_concurrency.lockutils [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] Acquiring lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.449934] env[62914]: DEBUG oslo_concurrency.lockutils [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] Acquired lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.450261] env[62914]: DEBUG nova.network.neutron [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Refreshing network info cache for port 1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1244.550496] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1244.877228] env[62914]: INFO 
nova.compute.manager [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 0.55 seconds to detach 1 volumes for instance. [ 1244.879851] env[62914]: DEBUG nova.compute.manager [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Deleting volume: 6704424c-ebf8-4ade-9901-bcd14b7d5207 {{(pid=62914) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1244.921984] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.983253] env[62914]: DEBUG nova.network.neutron [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1244.996607] env[62914]: DEBUG nova.network.neutron [-] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.055994] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1245.056270] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.262s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.071572] env[62914]: DEBUG nova.network.neutron [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.394332] env[62914]: DEBUG nova.compute.manager [req-9a5b8879-3453-4891-a8dc-c5057d3933c7 req-4e0ff53c-8b2f-4733-8883-f08fe6d1066d service nova] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Received event network-vif-deleted-bca6528d-bcd2-409f-b91d-8d3ceb00d244 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1245.419451] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.419815] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.419815] env[62914]: DEBUG nova.objects.instance [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'resources' on Instance uuid 8d764dc0-133c-4d0d-a8e2-da82270252ec {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1245.499756] env[62914]: INFO nova.compute.manager [-] [instance: 4b76e4eb-5d56-4eb0-82fc-47661dbc7239] Took 1.24 seconds to deallocate network for instance. [ 1245.574949] env[62914]: DEBUG oslo_concurrency.lockutils [req-9047db1f-47e3-491a-b59b-fd83d63d60d1 req-692f5fb1-977e-4d01-a164-afb6b4adeabe service nova] Releasing lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.575463] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.575700] env[62914]: DEBUG nova.network.neutron [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1246.006369] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.018208] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.018502] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.024955] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd9c6e9-fe46-4049-8848-e17e9abc40b3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.033782] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99463138-df7d-408c-a193-aac46cfa4522 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.067572] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-2fe83a69-70b9-45ee-97cb-9a3d779b117b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.075848] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c3425a-c217-4d9c-9e21-0482c806ed53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.092863] env[62914]: DEBUG nova.compute.provider_tree [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.112485] env[62914]: DEBUG nova.network.neutron [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Instance cache missing network info. {{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1246.257474] env[62914]: DEBUG nova.network.neutron [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating instance_info_cache with network_info: [{"id": "1555b103-122e-466e-838b-ce49a0c203ae", "address": "fa:16:3e:1c:02:58", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1555b103-12", "ovs_interfaceid": "1555b103-122e-466e-838b-ce49a0c203ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.524593] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.524987] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.596593] env[62914]: DEBUG nova.scheduler.client.report [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1246.760422] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.760768] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Instance network_info: |[{"id": "1555b103-122e-466e-838b-ce49a0c203ae", "address": "fa:16:3e:1c:02:58", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1555b103-12", "ovs_interfaceid": "1555b103-122e-466e-838b-ce49a0c203ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1246.761260] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:02:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1555b103-122e-466e-838b-ce49a0c203ae', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1246.768849] env[62914]: DEBUG oslo.service.loopingcall [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1246.769107] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1246.769753] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c93280e6-c62e-47bd-b762-1c608bd72621 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.790135] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1246.790135] env[62914]: value = "task-4832908" [ 1246.790135] env[62914]: _type = "Task" [ 1246.790135] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.800872] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832908, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.102046] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.682s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.104765] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.098s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.105707] env[62914]: DEBUG nova.objects.instance [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'resources' on Instance uuid 4b76e4eb-5d56-4eb0-82fc-47661dbc7239 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.124514] env[62914]: INFO nova.scheduler.client.report [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted allocations for instance 8d764dc0-133c-4d0d-a8e2-da82270252ec [ 1247.302042] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832908, 'name': CreateVM_Task, 'duration_secs': 0.333481} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.302243] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1247.303080] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.303320] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.303767] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1247.304045] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01757fdf-6e30-45f8-b971-ae66a8f038de {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.310653] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1247.310653] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c14237-43d1-7c6b-4261-2ed7897600c6" [ 1247.310653] env[62914]: _type = "Task" [ 1247.310653] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.321125] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c14237-43d1-7c6b-4261-2ed7897600c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.631867] env[62914]: DEBUG oslo_concurrency.lockutils [None req-1375dd2e-e78a-4d62-9e0d-8e1a909726cf tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.141s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.632863] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.839s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.633218] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.633514] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.633707] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.636645] env[62914]: INFO nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Terminating instance [ 1247.638760] env[62914]: DEBUG nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1247.639111] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d15a19ee-2c43-44c8-bd6b-d90a9f031afd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.659056] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77486ad0-802e-4cea-b868-a8f0c4686626 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.694272] env[62914]: WARNING nova.virt.vmwareapi.driver [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 8d764dc0-133c-4d0d-a8e2-da82270252ec could not be found. [ 1247.694514] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1247.697289] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18436734-0a7d-4d5a-a59f-d43575e746d9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.707024] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ca2869-83c3-4a85-bb09-5cf943f1efb4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.719284] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674e237f-3ccd-4ad0-80d9-10c33f3f5b51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.726601] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4e962b-101e-4334-8bea-1717997e9d38 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.741659] env[62914]: WARNING nova.virt.vmwareapi.vmops [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d764dc0-133c-4d0d-a8e2-da82270252ec could not be found. [ 1247.741875] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1247.742080] env[62914]: INFO nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 0.10 seconds to destroy the instance on the hypervisor. 
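Editor's note: the entries above repeat one interaction with vCenter over and over: a method such as Folder.CreateVM_Task or HostDatastoreBrowser.SearchDatastore_Task is invoked, oslo_vmware.api logs "Waiting for the task ... to complete", and _poll_task reports progress until the task reaches a terminal state. The sketch below illustrates that polling pattern only; it uses the standard library, and the fetch_task_state callable, the state names, and the parameters are hypothetical stand-ins rather than the oslo.vmware implementation.

    import time

    # Hypothetical terminal states; a vSphere task ends in one of these.
    _SUCCESS, _ERROR = "success", "error"

    def wait_for_task(fetch_task_state, task_id, interval=0.5, timeout=300):
        """Poll a long-running task until it reaches a terminal state.

        fetch_task_state is a caller-supplied callable returning
        (state, progress, error_message) for task_id -- a stand-in for the
        property reads the real client performs against the vCenter API.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = fetch_task_state(task_id)
            print(f"Task {task_id}: state={state} progress={progress}%")
            if state == _SUCCESS:
                return
            if state == _ERROR:
                raise RuntimeError(f"Task {task_id} failed: {error}")
            time.sleep(interval)  # back off before the next poll
        raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")

A fake fetch_task_state that reports "running" a few times and then "success" reproduces the 0% -> 100% -> "completed successfully" sequence recorded for task-4832908 above.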
[ 1247.742337] env[62914]: DEBUG oslo.service.loopingcall [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1247.766265] env[62914]: DEBUG nova.compute.manager [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1247.766430] env[62914]: DEBUG nova.network.neutron [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1247.769083] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379b6ecf-1b33-4c84-a2fa-fc8f331bb074 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.777289] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b47134-c87d-4be6-afb4-b82b10c3cd58 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.791184] env[62914]: DEBUG nova.compute.provider_tree [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.821910] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52c14237-43d1-7c6b-4261-2ed7897600c6, 'name': SearchDatastore_Task, 'duration_secs': 0.01062} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.822181] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.822419] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1247.822691] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.822857] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.823062] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1247.823370] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b60eb07-3ad8-49c8-93d8-1c84f86e7b22 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.832756] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1247.832970] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1247.833750] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d99b3357-5cdb-4d29-9cc7-21adb7b3ecfe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.839659] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1247.839659] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]529fb14d-ebf4-ca25-fe2b-5b10167f5107" [ 1247.839659] env[62914]: _type = "Task" [ 1247.839659] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.850272] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529fb14d-ebf4-ca25-fe2b-5b10167f5107, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.295478] env[62914]: DEBUG nova.scheduler.client.report [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1248.298641] env[62914]: DEBUG nova.network.neutron [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.350361] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]529fb14d-ebf4-ca25-fe2b-5b10167f5107, 'name': SearchDatastore_Task, 'duration_secs': 0.010364} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.351125] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1face1f1-aed1-457b-94c8-d1fb1afbb18a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.357260] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1248.357260] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cc8414-1230-b6c4-d41d-1ddc21f28281" [ 1248.357260] env[62914]: _type = "Task" [ 1248.357260] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.365833] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cc8414-1230-b6c4-d41d-1ddc21f28281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.800893] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.696s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.803455] env[62914]: INFO nova.compute.manager [-] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 1.04 seconds to deallocate network for instance. [ 1248.819700] env[62914]: WARNING nova.volume.cinder [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Attachment 41855a55-f2c5-4703-8079-cb6d33607e0c does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = 41855a55-f2c5-4703-8079-cb6d33607e0c. (HTTP 404) (Request-ID: req-ad6d2ed9-e7ae-40b6-b96a-50b0fff1f98a) [ 1248.820032] env[62914]: INFO nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Took 0.02 seconds to detach 1 volumes for instance. [ 1248.822357] env[62914]: DEBUG nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Deleting volume: 6704424c-ebf8-4ade-9901-bcd14b7d5207 {{(pid=62914) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1248.826355] env[62914]: INFO nova.scheduler.client.report [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted allocations for instance 4b76e4eb-5d56-4eb0-82fc-47661dbc7239 [ 1248.844444] env[62914]: WARNING nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Failed to delete volume: 6704424c-ebf8-4ade-9901-bcd14b7d5207 due to Volume 6704424c-ebf8-4ade-9901-bcd14b7d5207 could not be found.: nova.exception.VolumeNotFound: Volume 6704424c-ebf8-4ade-9901-bcd14b7d5207 could not be found. [ 1248.869135] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52cc8414-1230-b6c4-d41d-1ddc21f28281, 'name': SearchDatastore_Task, 'duration_secs': 0.011966} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.869873] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.869873] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] f35f819b-276a-4dc4-9cda-9b9a02d16c02/f35f819b-276a-4dc4-9cda-9b9a02d16c02.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1248.870420] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6e51400-3e43-44d7-b953-a924ea17295d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.880945] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1248.880945] env[62914]: value = "task-4832909" [ 1248.880945] env[62914]: _type = "Task" [ 1248.880945] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.890619] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832909, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.338624] env[62914]: DEBUG oslo_concurrency.lockutils [None req-022bf0ee-9630-4a20-9a3d-469444333da4 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "4b76e4eb-5d56-4eb0-82fc-47661dbc7239" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.204s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.350354] env[62914]: INFO nova.compute.manager [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 8d764dc0-133c-4d0d-a8e2-da82270252ec] Instance disappeared during terminate [ 1249.350620] env[62914]: DEBUG oslo_concurrency.lockutils [None req-16fefb72-46ca-4f60-a145-a58c6367a008 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "8d764dc0-133c-4d0d-a8e2-da82270252ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.718s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.391812] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832909, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.893030] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832909, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.392408] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832909, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.892756] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832909, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.567118} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.893087] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] f35f819b-276a-4dc4-9cda-9b9a02d16c02/f35f819b-276a-4dc4-9cda-9b9a02d16c02.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1250.893382] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1250.893582] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66fa9814-775e-4ae2-933d-750de82c0a9e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.900785] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1250.900785] env[62914]: value = "task-4832911" [ 1250.900785] env[62914]: _type = "Task" [ 1250.900785] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.908557] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.411412] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063069} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.411705] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1251.412531] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b8691a-1e57-4f06-a1be-3d63f23a4dac {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.435099] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] f35f819b-276a-4dc4-9cda-9b9a02d16c02/f35f819b-276a-4dc4-9cda-9b9a02d16c02.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.435359] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee237cf9-9f95-42ac-a189-7c3593411a5c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.456021] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1251.456021] env[62914]: value = "task-4832912" [ 1251.456021] env[62914]: _type = "Task" [ 1251.456021] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.464323] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832912, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.966073] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832912, 'name': ReconfigVM_Task, 'duration_secs': 0.301962} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.966514] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Reconfigured VM instance instance-0000007b to attach disk [datastore2] f35f819b-276a-4dc4-9cda-9b9a02d16c02/f35f819b-276a-4dc4-9cda-9b9a02d16c02.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.966846] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74befd68-2900-4934-9e0b-5851baa78648 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.974115] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1251.974115] env[62914]: value = "task-4832913" [ 1251.974115] env[62914]: _type = "Task" [ 1251.974115] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.982037] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832913, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.485775] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832913, 'name': Rename_Task, 'duration_secs': 0.136996} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.486113] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1252.486403] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-006ecfee-a85f-47be-84e9-7fb5967923c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.493693] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1252.493693] env[62914]: value = "task-4832914" [ 1252.493693] env[62914]: _type = "Task" [ 1252.493693] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.504429] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832914, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.881702] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.882123] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.882414] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.882685] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.882953] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.885549] env[62914]: INFO nova.compute.manager [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Terminating instance [ 1252.888062] env[62914]: DEBUG nova.compute.manager [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1252.888340] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1252.889286] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcaf0ff-41f5-42e1-8453-3d9f2ed7c537 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.898919] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1252.899210] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-046d01a3-af57-44ea-baf8-00538b7da785 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.906850] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1252.906850] env[62914]: value = "task-4832915" [ 1252.906850] env[62914]: _type = "Task" [ 1252.906850] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.916196] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832915, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.004261] env[62914]: DEBUG oslo_vmware.api [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832914, 'name': PowerOnVM_Task, 'duration_secs': 0.444752} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.004638] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1253.004842] env[62914]: INFO nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Took 9.46 seconds to spawn the instance on the hypervisor. 
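Editor's note: the preceding entries trace the spawn path for instance f35f819b-276a-4dc4-9cda-9b9a02d16c02: the image cache directory on datastore2 is created if missing, the cached 75c43660-b52b-450e-ba36-0f721e14bc6c VMDK is copied into the instance folder (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), the disk is attached via ReconfigVM_Task, and the VM is renamed and powered on. The sketch below only mirrors that sequencing under per-image locking; the copy_disk, extend_disk, attach_disk, and power_on callables are hypothetical stand-ins for the vmops/ds_util/vm_util operations named in the log, not Nova's implementation.

    import threading
    from contextlib import contextmanager

    # One lock per cached image, mirroring the
    # "[datastore2] devstack-image-cache_base/<image-id>" locks above.
    _image_locks: dict[str, threading.Lock] = {}

    @contextmanager
    def image_cache_lock(image_id: str):
        lock = _image_locks.setdefault(image_id, threading.Lock())  # sketch; not race-free
        with lock:
            yield

    def spawn_from_cache(image_id, instance_id, root_size_gb,
                         copy_disk, extend_disk, attach_disk, power_on):
        """Sequence the cache -> copy -> extend -> attach -> power-on steps."""
        cached = f"[datastore2] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target = f"[datastore2] {instance_id}/{instance_id}.vmdk"
        with image_cache_lock(image_id):      # serialize access to the cached image
            copy_disk(cached, target)         # CopyVirtualDisk_Task equivalent
        extend_disk(target, root_size_gb)     # grow the copy to the flavor root size
        attach_disk(instance_id, target)      # ReconfigVM_Task equivalent
        power_on(instance_id)                 # PowerOnVM_Task equivalent

Calling spawn_from_cache with print-based lambdas for the four callables reproduces the copy -> extend -> reconfigure -> power-on ordering recorded above.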
[ 1253.005068] env[62914]: DEBUG nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1253.005917] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f8e653-87d6-4eb4-bce4-3889a8a59ecc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.418043] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832915, 'name': PowerOffVM_Task, 'duration_secs': 0.223342} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.418373] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1253.418545] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1253.418809] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd601f5b-edf0-416d-9e15-b246b9608e1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.482697] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1253.482966] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1253.483159] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleting the datastore file [datastore2] 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.483439] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6c3e0a5-a6dc-47f3-aa8b-7863fc5490a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.489973] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 
tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for the task: (returnval){ [ 1253.489973] env[62914]: value = "task-4832917" [ 1253.489973] env[62914]: _type = "Task" [ 1253.489973] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.497859] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832917, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.524204] env[62914]: INFO nova.compute.manager [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Took 14.19 seconds to build instance. [ 1253.987900] env[62914]: DEBUG nova.compute.manager [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Received event network-changed-1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1253.988109] env[62914]: DEBUG nova.compute.manager [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Refreshing instance network info cache due to event network-changed-1555b103-122e-466e-838b-ce49a0c203ae. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1253.988396] env[62914]: DEBUG oslo_concurrency.lockutils [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] Acquiring lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.988640] env[62914]: DEBUG oslo_concurrency.lockutils [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] Acquired lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.988927] env[62914]: DEBUG nova.network.neutron [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Refreshing network info cache for port 1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1254.000866] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832917, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.026131] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2045cd91-d083-4979-ba2b-395865527ff7 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.696s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.502418] env[62914]: DEBUG oslo_vmware.api [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Task: {'id': task-4832917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.539629} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.502808] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.503152] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1254.503424] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1254.503627] env[62914]: INFO nova.compute.manager [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1254.503884] env[62914]: DEBUG oslo.service.loopingcall [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1254.504106] env[62914]: DEBUG nova.compute.manager [-] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1254.504204] env[62914]: DEBUG nova.network.neutron [-] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1254.707944] env[62914]: DEBUG nova.network.neutron [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updated VIF entry in instance network info cache for port 1555b103-122e-466e-838b-ce49a0c203ae. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1254.708387] env[62914]: DEBUG nova.network.neutron [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating instance_info_cache with network_info: [{"id": "1555b103-122e-466e-838b-ce49a0c203ae", "address": "fa:16:3e:1c:02:58", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1555b103-12", "ovs_interfaceid": "1555b103-122e-466e-838b-ce49a0c203ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.214125] env[62914]: DEBUG oslo_concurrency.lockutils [req-d32b4f2f-dfdc-47da-ba15-15bbe250256b req-1ac84bf8-1564-4f8b-8d4a-23c886e5e2f5 service nova] Releasing lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.236863] env[62914]: DEBUG nova.network.neutron [-] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.739965] env[62914]: INFO nova.compute.manager [-] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Took 1.24 seconds to deallocate network for instance. 
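Editor's note: the "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" bookkeeping that brackets this section (for example the "compute_resources" lock taken by the resource tracker just below) is emitted by oslo.concurrency's lockutils wrapper. A small illustrative sketch of that mechanism follows; the function name is a placeholder and this is not the Nova resource tracker itself, which serializes its usage updates on the same lock name.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_example():
    # Runs with the in-process "compute_resources" semaphore held; the
    # decorator's wrapper is what logs the Acquiring / acquired / "released"
    # DEBUG lines, including the waited/held timings seen in this capture.
    pass

update_usage_example()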
[ 1256.014011] env[62914]: DEBUG nova.compute.manager [req-26a221aa-272b-4a15-ab13-89ab303c96f1 req-f9a8e7fb-9ee9-4007-891b-19a9dc3478f6 service nova] [instance: 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6] Received event network-vif-deleted-23732df6-58dd-4637-9c04-c25d6b049c91 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1256.247081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.247654] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.247654] env[62914]: DEBUG nova.objects.instance [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lazy-loading 'resources' on Instance uuid 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.819329] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5290c90e-114f-4b9b-9080-3c8ff9aecd92 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.828166] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8ec6ec-64d2-4752-80a3-8b9f088a47d6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.859964] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d40a0e-727d-4ea4-b0da-63ab92c9cad9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.867712] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2722eccd-7554-4a3a-8bb4-107ed5b0d511 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.882072] env[62914]: DEBUG nova.compute.provider_tree [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.385668] env[62914]: DEBUG nova.scheduler.client.report [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1257.890788] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.911075] env[62914]: INFO nova.scheduler.client.report [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Deleted allocations for instance 7aa4401b-60e5-41b8-b4de-b4fb5ab799c6 [ 1258.418851] env[62914]: DEBUG oslo_concurrency.lockutils [None req-8b5d8834-f3c5-4e2c-95f3-9f8ef2603d59 tempest-ServerActionsTestOtherA-348254139 tempest-ServerActionsTestOtherA-348254139-project-member] Lock "7aa4401b-60e5-41b8-b4de-b4fb5ab799c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.537s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.585592] env[62914]: DEBUG nova.compute.manager [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Stashing vm_state: active {{(pid=62914) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1259.106655] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.106948] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.612723] env[62914]: INFO nova.compute.claims [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.124893] env[62914]: INFO nova.compute.resource_tracker [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating resource usage from migration de5c4fe1-7ee8-4165-9d23-db915407f987 [ 1260.198793] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499371a5-2f4e-4680-8ba3-122121d2f01b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.207399] env[62914]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c424cc57-c97d-49a6-a317-ab5260b74462 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.241549] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d97223-e5d3-4d2a-aafe-faa54368f157 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.250621] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882a3ab0-f97e-4779-b4fb-3e63cf2b9a77 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.265655] env[62914]: DEBUG nova.compute.provider_tree [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.769276] env[62914]: DEBUG nova.scheduler.client.report [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1261.275064] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.168s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.275316] env[62914]: INFO nova.compute.manager [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Migrating [ 1261.791434] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.791868] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.791868] env[62914]: DEBUG nova.network.neutron [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1262.527889] env[62914]: DEBUG nova.network.neutron [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.030537] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.545452] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e74077b-de29-4d9d-ae9d-57d26dffd286 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.565014] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 0 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1265.072190] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1265.072512] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c632604b-92e1-4301-a2d6-08ba12785f21 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.080724] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1265.080724] env[62914]: value = "task-4832918" [ 1265.080724] env[62914]: _type = "Task" [ 1265.080724] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.089186] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.590649] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832918, 'name': PowerOffVM_Task, 'duration_secs': 0.214937} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.591158] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1265.591257] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 17 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1266.097984] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:21:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1266.098251] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1266.098304] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1266.098448] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1266.098599] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1266.098758] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1266.098967] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1266.099151] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1266.099327] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1266.099498] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1266.099679] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1266.104583] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f32611-3b72-4fc6-a292-6fa0f421777a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.120448] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1266.120448] env[62914]: value = "task-4832919" [ 1266.120448] env[62914]: _type = "Task" [ 1266.120448] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.128718] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832919, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.630929] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832919, 'name': ReconfigVM_Task, 'duration_secs': 0.246765} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.631326] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 33 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1267.137643] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1267.137976] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1267.138178] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1267.138376] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1267.138534] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1267.138692] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1267.138911] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1267.139175] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1267.139458] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1267.139668] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1267.139853] env[62914]: DEBUG nova.virt.hardware [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1267.145304] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1267.145624] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adda552a-42d3-48a1-a8af-da3e19498e6d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.167195] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1267.167195] env[62914]: value = "task-4832920" [ 1267.167195] env[62914]: _type = "Task" [ 1267.167195] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.175993] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832920, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.677789] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832920, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.179038] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832920, 'name': ReconfigVM_Task, 'duration_secs': 0.514463} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.179242] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1268.180020] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf2caae-993c-4d3a-b9c2-d2cee036eedf {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.203258] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1268.203627] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18571533-41d8-4edc-95a4-6aed5b8082d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.222682] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1268.222682] env[62914]: value = "task-4832921" [ 1268.222682] env[62914]: _type = "Task" [ 1268.222682] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.231083] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832921, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.733931] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832921, 'name': ReconfigVM_Task, 'duration_secs': 0.312496} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.734332] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1268.734526] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 50 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1269.242768] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6e4af8-b233-42c9-8891-27e45ab00271 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.276252] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc94a2ee-023c-4c47-aebf-7d8f50428f09 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.295482] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 67 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1269.858854] env[62914]: DEBUG nova.network.neutron [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Port 8022bac9-6ae9-47da-a35b-34baa22c828e binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1270.881819] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.882217] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.882268] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.918682] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1271.918899] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.919079] env[62914]: DEBUG nova.network.neutron [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1272.639162] env[62914]: DEBUG nova.network.neutron [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.142231] env[62914]: DEBUG oslo_concurrency.lockutils [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1273.676228] env[62914]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dde4c5a-c716-4913-b1be-a17e08d28601 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.696063] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b86140c-96e3-409e-9978-7779feb65e4a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.703228] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 83 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1274.209288] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1274.209725] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2329a3f6-dd9a-4bf2-a202-fcbb70d86be2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.217258] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1274.217258] env[62914]: value = "task-4832922" [ 1274.217258] env[62914]: _type = "Task" [ 1274.217258] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.226144] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832922, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.727813] env[62914]: DEBUG oslo_vmware.api [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832922, 'name': PowerOnVM_Task, 'duration_secs': 0.476468} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.728105] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1274.728310] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-65f740b5-7b04-4436-9277-717729c845b7 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance '9c98dff8-29b4-4f01-b110-6a89c5276d7b' progress to 100 {{(pid=62914) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1277.158852] env[62914]: DEBUG nova.network.neutron [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Port 8022bac9-6ae9-47da-a35b-34baa22c828e binding to destination host cpu-1 is already ACTIVE {{(pid=62914) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1277.159192] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.159314] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.159484] env[62914]: DEBUG nova.network.neutron [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1277.985684] env[62914]: DEBUG nova.network.neutron [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.099169] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.099441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.099672] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "74e7896c-8a1f-448d-a44b-e6febfff9000-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.099861] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.100052] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.102732] env[62914]: INFO nova.compute.manager [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Terminating instance [ 1278.104558] env[62914]: DEBUG nova.compute.manager [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1278.104766] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1278.105637] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded17d91-3bd5-4a7a-84ac-7c2062fb2343 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.113306] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1278.113548] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b08e4710-4f36-49b6-a3ea-f3cc2994182e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.120546] env[62914]: DEBUG oslo_vmware.api [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1278.120546] env[62914]: value = "task-4832923" [ 1278.120546] env[62914]: _type = "Task" [ 1278.120546] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.128827] env[62914]: DEBUG oslo_vmware.api [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.489770] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.630735] env[62914]: DEBUG oslo_vmware.api [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832923, 'name': PowerOffVM_Task, 'duration_secs': 0.221554} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.631311] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1278.631494] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1278.631765] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ece79803-c849-4e76-9b34-2cefc5ef7283 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.693165] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1278.693427] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1278.693627] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Deleting the datastore file [datastore2] 74e7896c-8a1f-448d-a44b-e6febfff9000 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1278.693900] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e131d39-3c36-4c29-9355-57e74b5173b0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.700733] env[62914]: DEBUG oslo_vmware.api [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1278.700733] env[62914]: value = "task-4832925" [ 1278.700733] env[62914]: _type = "Task" [ 1278.700733] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.708557] env[62914]: DEBUG oslo_vmware.api [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832925, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.993759] env[62914]: DEBUG nova.compute.manager [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62914) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1278.994041] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.994288] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.212679] env[62914]: DEBUG oslo_vmware.api [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149162} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.212965] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1279.213214] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1279.213406] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1279.213590] env[62914]: INFO nova.compute.manager [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1279.213853] env[62914]: DEBUG oslo.service.loopingcall [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1279.214088] env[62914]: DEBUG nova.compute.manager [-] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1279.214202] env[62914]: DEBUG nova.network.neutron [-] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1279.497699] env[62914]: DEBUG nova.objects.instance [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'migration_context' on Instance uuid 9c98dff8-29b4-4f01-b110-6a89c5276d7b {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1279.656783] env[62914]: DEBUG nova.compute.manager [req-f413d6e0-05dd-43f7-bfd1-29fd4decd7f1 req-40ff5f05-daab-46d8-89fc-e13f376b5c85 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Received event network-vif-deleted-5e05c7fc-1efe-4e76-b521-ac8bcee07403 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1279.656783] env[62914]: INFO nova.compute.manager [req-f413d6e0-05dd-43f7-bfd1-29fd4decd7f1 req-40ff5f05-daab-46d8-89fc-e13f376b5c85 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Neutron deleted interface 5e05c7fc-1efe-4e76-b521-ac8bcee07403; detaching it from the instance and deleting it from the info cache [ 1279.656783] env[62914]: DEBUG nova.network.neutron [req-f413d6e0-05dd-43f7-bfd1-29fd4decd7f1 req-40ff5f05-daab-46d8-89fc-e13f376b5c85 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.069785] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3263e8-e81e-4e77-b461-9ded20b8a7c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.078137] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06557f0-9375-4ec1-890c-4bdd18be1b32 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.109243] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f4dbbf-e011-4b7d-a862-cd317036fe07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.118021] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15925dcb-c271-4919-9792-6e1eea78cda7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.132252] env[62914]: DEBUG nova.network.neutron [-] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.133797] env[62914]: DEBUG nova.compute.provider_tree [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: 
f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.159579] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a9e6837-10f5-4c1b-b624-45399fa9b1f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.170123] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8747da-c4e1-4135-a2c2-aab10114f176 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.198620] env[62914]: DEBUG nova.compute.manager [req-f413d6e0-05dd-43f7-bfd1-29fd4decd7f1 req-40ff5f05-daab-46d8-89fc-e13f376b5c85 service nova] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Detach interface failed, port_id=5e05c7fc-1efe-4e76-b521-ac8bcee07403, reason: Instance 74e7896c-8a1f-448d-a44b-e6febfff9000 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1280.636601] env[62914]: INFO nova.compute.manager [-] [instance: 74e7896c-8a1f-448d-a44b-e6febfff9000] Took 1.42 seconds to deallocate network for instance. [ 1280.637569] env[62914]: DEBUG nova.scheduler.client.report [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1281.148789] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.649669] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.655s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.655438] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.507s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.655703] env[62914]: DEBUG nova.objects.instance [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'resources' on Instance uuid 74e7896c-8a1f-448d-a44b-e6febfff9000 
{{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1282.237986] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cc4a4e-c5ad-4fa8-b622-7d0cf172db01 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.246983] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb934425-0867-4a6d-b75c-9d312017760d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.279394] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf7e0cc-9bca-4881-9365-d5b5cb6584a0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.287714] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ace0983-512a-4a93-a425-57e75e770172 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.302639] env[62914]: DEBUG nova.compute.provider_tree [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.805750] env[62914]: DEBUG nova.scheduler.client.report [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.189685] env[62914]: INFO nova.compute.manager [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Swapping old allocation on dict_keys(['f2f7a014-852b-4b37-9610-c5761f4b0175']) held by migration de5c4fe1-7ee8-4165-9d23-db915407f987 for instance [ 1283.214716] env[62914]: DEBUG nova.scheduler.client.report [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Overwriting current allocation {'allocations': {'f2f7a014-852b-4b37-9610-c5761f4b0175': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 168}}, 'project_id': '5ae1b7abf6f24eccb2b44d82687deb76', 'user_id': 'f4f1342629ac4aee802a2b69a5459827', 'consumer_generation': 1} on consumer 9c98dff8-29b4-4f01-b110-6a89c5276d7b {{(pid=62914) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1283.307900] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock 
"refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1283.308184] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.308380] env[62914]: DEBUG nova.network.neutron [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1283.310590] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.655s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.332148] env[62914]: INFO nova.scheduler.client.report [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Deleted allocations for instance 74e7896c-8a1f-448d-a44b-e6febfff9000 [ 1283.842574] env[62914]: DEBUG oslo_concurrency.lockutils [None req-56e05eab-69fa-4ca4-812c-29e7a01d9bc8 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "74e7896c-8a1f-448d-a44b-e6febfff9000" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.743s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.066511] env[62914]: DEBUG nova.network.neutron [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [{"id": "8022bac9-6ae9-47da-a35b-34baa22c828e", "address": "fa:16:3e:de:4f:82", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8022bac9-6a", "ovs_interfaceid": "8022bac9-6ae9-47da-a35b-34baa22c828e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.569742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-9c98dff8-29b4-4f01-b110-6a89c5276d7b" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.570368] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1284.570709] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4aae82b2-d9fc-4673-b4e2-3f270ab5afa5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.579350] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1284.579350] env[62914]: value = "task-4832926" [ 1284.579350] env[62914]: _type = "Task" [ 1284.579350] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.589580] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.089658] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832926, 'name': PowerOffVM_Task, 'duration_secs': 0.242542} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.090032] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1285.090651] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1285.090876] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1285.091063] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.091291] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1285.091450] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.091604] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1285.091810] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1285.091973] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1285.092158] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1285.092329] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1285.092502] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1285.097562] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5dd7b47-f333-4ff5-80e2-58299cc9a17b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.115633] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1285.115633] env[62914]: value = "task-4832927" [ 1285.115633] env[62914]: _type = "Task" [ 1285.115633] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.125365] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832927, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.626993] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832927, 'name': ReconfigVM_Task, 'duration_secs': 0.19095} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.627887] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24948c92-2417-438c-897e-1ddf802cede6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.647021] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1285.647289] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1285.647464] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.647727] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1285.647892] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.648098] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1285.648345] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1285.648515] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1285.648693] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1285.648878] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1285.649068] env[62914]: DEBUG nova.virt.hardware [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1285.649968] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-712f0857-1a70-4d76-936c-fea960d34128 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.656336] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1285.656336] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b86c6-b40a-1862-0e28-877aa813fa1c" [ 1285.656336] env[62914]: _type = "Task" [ 1285.656336] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.665175] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b86c6-b40a-1862-0e28-877aa813fa1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.167086] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]526b86c6-b40a-1862-0e28-877aa813fa1c, 'name': SearchDatastore_Task, 'duration_secs': 0.011546} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.172524] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1286.172841] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d332a25-058e-4f70-80fd-8c4fdf3b5ff2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.192097] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1286.192097] env[62914]: value = "task-4832928" [ 1286.192097] env[62914]: _type = "Task" [ 1286.192097] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.201013] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832928, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.509176] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.509463] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.701720] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832928, 'name': ReconfigVM_Task, 'duration_secs': 0.251538} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.702036] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1286.702846] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2ff9f6-1c0e-40d9-b296-1d18b4844170 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.724627] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1286.724910] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49a81454-9218-4f0b-9fd0-5b37a3504de9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.743456] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1286.743456] env[62914]: value = "task-4832929" [ 1286.743456] env[62914]: _type = "Task" [ 1286.743456] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.753576] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.011862] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1287.256104] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832929, 'name': ReconfigVM_Task, 'duration_secs': 0.388323} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.256608] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b/9c98dff8-29b4-4f01-b110-6a89c5276d7b.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.257469] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3eeee4d-a3ef-4e4e-8053-5745f4a8d6c1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.277562] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df91e355-ac36-4f1c-8cf2-fbfda11c9da4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.296904] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce50c640-a72e-4634-be60-3ac8e247bd89 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.316685] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df2a8c7-b04e-4868-928d-5c96767a2238 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.324564] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1287.324850] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-526af584-1b2c-4043-964b-27de5499a1cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.332305] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1287.332305] env[62914]: value = "task-4832930" [ 1287.332305] env[62914]: _type = "Task" [ 1287.332305] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.341603] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832930, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.534823] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.534944] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.536619] env[62914]: INFO nova.compute.claims [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1287.841971] env[62914]: DEBUG oslo_vmware.api [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832930, 'name': PowerOnVM_Task, 'duration_secs': 0.415336} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.842463] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1288.595542] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dff0a3-8fa4-4ea1-a1fa-c8c3be092420 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.603555] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a55b37e-f90e-43b2-82c3-9a9c4b0a3028 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.633886] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da12453-c534-45ea-958e-eecd0d0b2b1b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.641399] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81be73e5-dc54-42b5-b522-cf485caa6161 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.654874] env[62914]: DEBUG nova.compute.provider_tree [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.853409] env[62914]: INFO nova.compute.manager [None 
req-2b276e7c-4c00-479a-b39f-27fdc0256f38 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance to original state: 'active' [ 1289.157636] env[62914]: DEBUG nova.scheduler.client.report [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1289.663165] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.127s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.663702] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1289.873956] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.874441] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.874772] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.875187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.875461] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.878866] env[62914]: INFO nova.compute.manager [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Terminating instance [ 1289.881588] env[62914]: DEBUG nova.compute.manager [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1289.881901] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1289.883195] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d395bf5-f9e6-4289-92cf-59d7d5b2ff3d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.894653] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1289.894984] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fcf5bbb-ad9e-44a1-8fb9-4de3206c2488 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.903750] env[62914]: DEBUG oslo_vmware.api [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1289.903750] env[62914]: value = "task-4832931" [ 1289.903750] env[62914]: _type = "Task" [ 1289.903750] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.916371] env[62914]: DEBUG oslo_vmware.api [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832931, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.170217] env[62914]: DEBUG nova.compute.utils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1290.171675] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1290.171870] env[62914]: DEBUG nova.network.neutron [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1290.219371] env[62914]: DEBUG nova.policy [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a40a14d9e19a4e2894245814173656eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1780142384594b1dabc6811b54144d56', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1290.414509] env[62914]: DEBUG oslo_vmware.api [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832931, 'name': PowerOffVM_Task, 'duration_secs': 0.45219} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.414803] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1290.414981] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1290.415295] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18782e8b-9269-4428-b3d0-1088597314e5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.528481] env[62914]: DEBUG nova.network.neutron [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Successfully created port: b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1290.674787] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Start building block device mappings for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1291.243764] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.244125] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.625671] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1291.625844] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Deleting contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1291.626053] env[62914]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleting the datastore file [datastore1] 9c98dff8-29b4-4f01-b110-6a89c5276d7b {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.626385] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ddeb9ad-a76c-4b67-8532-10a93f7654d1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.632930] env[62914]: DEBUG oslo_vmware.api [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1291.632930] env[62914]: value = "task-4832933" [ 1291.632930] env[62914]: _type = "Task" [ 1291.632930] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.641494] env[62914]: DEBUG oslo_vmware.api [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.684179] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1291.711148] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1291.711429] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1291.711596] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1291.711788] env[62914]: DEBUG nova.virt.hardware [None 
req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1291.711945] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1291.712116] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1291.712391] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1291.712626] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1291.712858] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1291.713042] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1291.713231] env[62914]: DEBUG nova.virt.hardware [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1291.714121] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aaa8fe-34a8-45b6-a477-e477c007257a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.722675] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030becca-a951-4878-bfc6-4a220e69b216 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.747050] env[62914]: DEBUG nova.compute.utils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1291.995593] env[62914]: DEBUG nova.compute.manager 
[req-e60946af-4499-4bf9-9a5f-5df945cd387b req-f2653f61-b5a8-474b-93e0-2c97a114edf9 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Received event network-vif-plugged-b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1291.995960] env[62914]: DEBUG oslo_concurrency.lockutils [req-e60946af-4499-4bf9-9a5f-5df945cd387b req-f2653f61-b5a8-474b-93e0-2c97a114edf9 service nova] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.996348] env[62914]: DEBUG oslo_concurrency.lockutils [req-e60946af-4499-4bf9-9a5f-5df945cd387b req-f2653f61-b5a8-474b-93e0-2c97a114edf9 service nova] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.996659] env[62914]: DEBUG oslo_concurrency.lockutils [req-e60946af-4499-4bf9-9a5f-5df945cd387b req-f2653f61-b5a8-474b-93e0-2c97a114edf9 service nova] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.996974] env[62914]: DEBUG nova.compute.manager [req-e60946af-4499-4bf9-9a5f-5df945cd387b req-f2653f61-b5a8-474b-93e0-2c97a114edf9 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] No waiting events found dispatching network-vif-plugged-b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1291.997303] env[62914]: WARNING nova.compute.manager [req-e60946af-4499-4bf9-9a5f-5df945cd387b req-f2653f61-b5a8-474b-93e0-2c97a114edf9 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Received unexpected event network-vif-plugged-b5f7a871-c81e-497e-9960-b3b7d7981318 for instance with vm_state building and task_state spawning. [ 1292.143015] env[62914]: DEBUG oslo_vmware.api [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239746} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.143450] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.143654] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Deleted contents of the VM from datastore datastore1 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1292.143840] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1292.144038] env[62914]: INFO nova.compute.manager [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Took 2.26 seconds to destroy the instance on the hypervisor. [ 1292.144480] env[62914]: DEBUG oslo.service.loopingcall [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1292.144480] env[62914]: DEBUG nova.compute.manager [-] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1292.144640] env[62914]: DEBUG nova.network.neutron [-] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1292.249494] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.633321] env[62914]: DEBUG nova.compute.manager [req-98a01813-a786-4059-a59e-b72fac83b26a req-1b62a0f9-8e16-42f5-9fe1-8e7c7de04f02 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Received event network-vif-deleted-8022bac9-6ae9-47da-a35b-34baa22c828e {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1292.633573] env[62914]: INFO nova.compute.manager [req-98a01813-a786-4059-a59e-b72fac83b26a req-1b62a0f9-8e16-42f5-9fe1-8e7c7de04f02 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Neutron deleted interface 8022bac9-6ae9-47da-a35b-34baa22c828e; detaching it from the instance and deleting it from the info cache [ 1292.633755] env[62914]: DEBUG nova.network.neutron [req-98a01813-a786-4059-a59e-b72fac83b26a req-1b62a0f9-8e16-42f5-9fe1-8e7c7de04f02 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.693297] env[62914]: DEBUG nova.network.neutron [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Successfully updated port: b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1292.717616] env[62914]: DEBUG nova.compute.manager [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Received event network-changed-b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1292.717616] env[62914]: DEBUG nova.compute.manager [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Refreshing instance network info cache due to event network-changed-b5f7a871-c81e-497e-9960-b3b7d7981318. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1292.717616] env[62914]: DEBUG oslo_concurrency.lockutils [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] Acquiring lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.717616] env[62914]: DEBUG oslo_concurrency.lockutils [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] Acquired lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.717616] env[62914]: DEBUG nova.network.neutron [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Refreshing network info cache for port b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1293.106511] env[62914]: DEBUG nova.network.neutron [-] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.136240] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cd20bbb-2faf-4482-9dc3-0f0ccab7628c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.145974] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de045894-6458-41f1-9deb-b7cd703838d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.174747] env[62914]: DEBUG nova.compute.manager [req-98a01813-a786-4059-a59e-b72fac83b26a req-1b62a0f9-8e16-42f5-9fe1-8e7c7de04f02 service nova] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Detach interface failed, port_id=8022bac9-6ae9-47da-a35b-34baa22c828e, reason: Instance 9c98dff8-29b4-4f01-b110-6a89c5276d7b could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1293.195134] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.267328] env[62914]: DEBUG nova.network.neutron [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1293.330047] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.330333] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.330679] env[62914]: INFO nova.compute.manager [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Attaching volume 4f197398-0ea7-4394-a22f-f6f4a1b8fe84 to /dev/sdb [ 1293.351682] env[62914]: DEBUG nova.network.neutron [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.371200] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe36fd8-3455-4b73-9b6f-c53938702890 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.379626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeed2ec-c334-468a-8c71-df1bce5b689b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.394415] env[62914]: DEBUG nova.virt.block_device [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating existing volume attachment record: a1a75dfc-adaf-4890-a917-1f495849a4df {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1293.609364] env[62914]: INFO nova.compute.manager [-] [instance: 9c98dff8-29b4-4f01-b110-6a89c5276d7b] Took 1.46 seconds to deallocate network for instance. 
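The teardown recorded above for instance 9c98dff8-29b4-4f01-b110-6a89c5276d7b (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, then network deallocation) follows the oslo.vmware session/task pattern that the repeated wait_for_task and _poll_task lines report. The sketch below is an illustration of that pattern only, not code from this run: it assumes an already-created oslo_vmware.api.VMwareAPISession, and vm_ref, ds_path and dc_ref are placeholder references whose exact call signatures should be checked against the oslo.vmware version in use.

    def _destroy_vm(session, vm_ref, ds_path, dc_ref):
        """Illustrative sketch of the PowerOffVM_Task -> UnregisterVM ->
        DeleteDatastoreFile_Task sequence logged above (oslo.vmware style)."""
        # Power off the VM and block until vCenter reports the task finished
        # (the "Waiting for the task ... PowerOffVM_Task" / progress lines).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # Remove the VM from the vCenter inventory; UnregisterVM is not a task.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore, again as a task.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)

Network deallocation (the deallocate_for_instance() call above) then happens on the Nova side through Neutron, outside the vSphere session.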
[ 1293.855045] env[62914]: DEBUG oslo_concurrency.lockutils [req-fe31c146-d366-4575-be20-a0ebb97395f1 req-5ec56141-b9ef-43a9-9ef9-d3eb4e95ba07 service nova] Releasing lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.855561] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.855789] env[62914]: DEBUG nova.network.neutron [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1294.117522] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.117793] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.118039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.143211] env[62914]: INFO nova.scheduler.client.report [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted allocations for instance 9c98dff8-29b4-4f01-b110-6a89c5276d7b [ 1294.398599] env[62914]: DEBUG nova.network.neutron [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1294.536044] env[62914]: DEBUG nova.network.neutron [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating instance_info_cache with network_info: [{"id": "b5f7a871-c81e-497e-9960-b3b7d7981318", "address": "fa:16:3e:89:25:e0", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5f7a871-c8", "ovs_interfaceid": "b5f7a871-c81e-497e-9960-b3b7d7981318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.652326] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f2ab9677-0e5b-4998-98d6-af026b098afa tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "9c98dff8-29b4-4f01-b110-6a89c5276d7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.778s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.039019] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.039019] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Instance network_info: |[{"id": "b5f7a871-c81e-497e-9960-b3b7d7981318", "address": "fa:16:3e:89:25:e0", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5f7a871-c8", "ovs_interfaceid": "b5f7a871-c81e-497e-9960-b3b7d7981318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1295.039511] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:25:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5f7a871-c81e-497e-9960-b3b7d7981318', 'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1295.047823] env[62914]: DEBUG oslo.service.loopingcall [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1295.048087] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1295.048404] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdb754ad-22cc-4406-9660-06f6471a12b6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.068474] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1295.068474] env[62914]: value = "task-4832935" [ 1295.068474] env[62914]: _type = "Task" [ 1295.068474] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.079646] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832935, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.568268] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.568532] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 1295.568532] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Rebuilding the list of instances to heal {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1295.579502] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832935, 'name': CreateVM_Task, 'duration_secs': 0.324953} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.579686] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1295.580399] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.580585] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.580996] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1295.581280] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd2008c-52c9-407c-bf6d-cfcf43f229ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.586707] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1295.586707] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52637af2-59cc-1b7f-a7c2-1123adf073f7" [ 1295.586707] env[62914]: _type = "Task" [ 1295.586707] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.595270] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52637af2-59cc-1b7f-a7c2-1123adf073f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.025088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "a4a9a045-48e6-4f6e-80b6-437436346052" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.025088] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.074747] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Skipping network cache update for instance because it is Building. {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1296.098885] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52637af2-59cc-1b7f-a7c2-1123adf073f7, 'name': SearchDatastore_Task, 'duration_secs': 0.010381} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.099263] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.099545] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1296.099829] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.100026] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.100237] env[62914]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.100560] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00db4f20-2219-469f-8358-16b35964f4d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.110580] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.110788] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1296.111562] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-280a5b19-cdd6-46e4-828a-4341e59669b9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.114550] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.114686] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.114827] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1296.114980] env[62914]: DEBUG nova.objects.instance [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lazy-loading 'info_cache' on Instance uuid f35f819b-276a-4dc4-9cda-9b9a02d16c02 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1296.119938] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1296.119938] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]5294be8f-e92c-9113-ccc3-b93f8ab956cf" [ 1296.119938] env[62914]: _type = "Task" [ 1296.119938] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.129493] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5294be8f-e92c-9113-ccc3-b93f8ab956cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.527335] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Starting instance... {{(pid=62914) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1296.630694] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]5294be8f-e92c-9113-ccc3-b93f8ab956cf, 'name': SearchDatastore_Task, 'duration_secs': 0.01088} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.631520] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2706a3-a44b-4ec3-b676-a6f3c1ecc4f1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.637820] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1296.637820] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bcb4c6-d8bb-b1c5-09ec-c729948f4099" [ 1296.637820] env[62914]: _type = "Task" [ 1296.637820] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.646142] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bcb4c6-d8bb-b1c5-09ec-c729948f4099, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.051167] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.051464] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.053072] env[62914]: INFO nova.compute.claims [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.148073] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]52bcb4c6-d8bb-b1c5-09ec-c729948f4099, 'name': SearchDatastore_Task, 'duration_secs': 0.010917} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.148433] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.149034] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] abce5547-9f0e-4fd8-a44c-23aef12390d7/abce5547-9f0e-4fd8-a44c-23aef12390d7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1297.149034] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2961dcb-6cf0-4a3f-a8c4-baaa44e78b7c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.155574] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1297.155574] env[62914]: value = "task-4832937" [ 1297.155574] env[62914]: _type = "Task" [ 1297.155574] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.165510] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.669929] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508169} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.670216] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] abce5547-9f0e-4fd8-a44c-23aef12390d7/abce5547-9f0e-4fd8-a44c-23aef12390d7.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1297.670439] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1297.670720] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1b9c5c4-12cc-499a-9e3b-4e747d088ed3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.677650] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1297.677650] env[62914]: value = "task-4832938" [ 1297.677650] env[62914]: _type = "Task" [ 1297.677650] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.687090] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832938, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.875492] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating instance_info_cache with network_info: [{"id": "1555b103-122e-466e-838b-ce49a0c203ae", "address": "fa:16:3e:1c:02:58", "network": {"id": "5c3cc614-6524-49ac-a63f-af34980dc7b7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-255070113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b59bf6daf8c246f7b034dc0adcfc8cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1555b103-12", "ovs_interfaceid": "1555b103-122e-466e-838b-ce49a0c203ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.946297] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1297.946592] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942108', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'name': 'volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f35f819b-276a-4dc4-9cda-9b9a02d16c02', 'attached_at': '', 'detached_at': '', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'serial': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1297.947594] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bc5138-184b-4795-9f03-da30541f7e03 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.964983] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28c6328-2828-4346-a1c5-ac3ff6442c3c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.990273] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84/volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1297.990609] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b90be3ba-f1b2-4219-a44a-0e616eb2155a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.009506] env[62914]: DEBUG oslo_vmware.api [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1298.009506] env[62914]: value = "task-4832939" [ 1298.009506] env[62914]: _type = "Task" [ 1298.009506] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.021117] env[62914]: DEBUG oslo_vmware.api [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832939, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.119913] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef830fb2-3d1f-4a83-8304-b0f1dd83f272 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.128268] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4136d7e7-1b5a-48ff-bf8f-e71e2becce41 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.161295] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfe617d-d991-4fc4-ba16-ba46c9d32d53 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.169697] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1bb306-a3a1-4a63-8f46-45dc7ecfb55f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.184751] env[62914]: DEBUG nova.compute.provider_tree [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.194222] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063328} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.195172] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1298.195971] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95323d5-f80f-48f6-b667-214ca98cad6a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.218301] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] abce5547-9f0e-4fd8-a44c-23aef12390d7/abce5547-9f0e-4fd8-a44c-23aef12390d7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1298.218861] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db31f300-27f2-4e1d-bfa7-19b7871f3966 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.238720] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1298.238720] env[62914]: value = "task-4832940" [ 1298.238720] env[62914]: _type = "Task" [ 1298.238720] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.248154] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832940, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.378017] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-f35f819b-276a-4dc4-9cda-9b9a02d16c02" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.378368] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 1298.378498] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.378717] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.378866] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 1298.519451] env[62914]: DEBUG oslo_vmware.api [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832939, 'name': ReconfigVM_Task, 'duration_secs': 0.36031} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.519754] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Reconfigured VM instance instance-0000007b to attach disk [datastore2] volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84/volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1298.524612] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-059d6dae-9a43-49d9-b7e7-11651b38c9ae {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.540524] env[62914]: DEBUG oslo_vmware.api [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1298.540524] env[62914]: value = "task-4832941" [ 1298.540524] env[62914]: _type = "Task" [ 1298.540524] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.550075] env[62914]: DEBUG oslo_vmware.api [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832941, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.690766] env[62914]: DEBUG nova.scheduler.client.report [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1298.749267] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832940, 'name': ReconfigVM_Task, 'duration_secs': 0.310983} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.749643] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfigured VM instance instance-0000007c to attach disk [datastore2] abce5547-9f0e-4fd8-a44c-23aef12390d7/abce5547-9f0e-4fd8-a44c-23aef12390d7.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1298.750369] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-654bf585-3736-41f7-bb81-f94c7d2be8cb {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.757855] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1298.757855] env[62914]: value = "task-4832942" [ 1298.757855] env[62914]: _type = "Task" [ 1298.757855] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.766575] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832942, 'name': Rename_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.054033] env[62914]: DEBUG oslo_vmware.api [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832941, 'name': ReconfigVM_Task, 'duration_secs': 0.150615} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.054033] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942108', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'name': 'volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f35f819b-276a-4dc4-9cda-9b9a02d16c02', 'attached_at': '', 'detached_at': '', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'serial': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1299.195721] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.196382] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Start building networks asynchronously for instance. {{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1299.271099] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832942, 'name': Rename_Task} progress is 14%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.702223] env[62914]: DEBUG nova.compute.utils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1299.703726] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Allocating IP information in the background. {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1299.703938] env[62914]: DEBUG nova.network.neutron [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] allocate_for_instance() {{(pid=62914) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1299.769037] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832942, 'name': Rename_Task, 'duration_secs': 0.869173} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.769351] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1299.769634] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d501b73-aeed-4552-9e24-e3de050f8e85 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.777702] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1299.777702] env[62914]: value = "task-4832943" [ 1299.777702] env[62914]: _type = "Task" [ 1299.777702] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.786157] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832943, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.799753] env[62914]: DEBUG nova.policy [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4f1342629ac4aee802a2b69a5459827', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ae1b7abf6f24eccb2b44d82687deb76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62914) authorize /opt/stack/nova/nova/policy.py:201}} [ 1300.101973] env[62914]: DEBUG nova.objects.instance [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'flavor' on Instance uuid f35f819b-276a-4dc4-9cda-9b9a02d16c02 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1300.104120] env[62914]: DEBUG nova.network.neutron [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Successfully created port: 33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1300.209516] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Start building block device mappings for instance. 
{{(pid=62914) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1300.288336] env[62914]: DEBUG oslo_vmware.api [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832943, 'name': PowerOnVM_Task, 'duration_secs': 0.45005} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.288336] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1300.288590] env[62914]: INFO nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Took 8.60 seconds to spawn the instance on the hypervisor. [ 1300.288640] env[62914]: DEBUG nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1300.289421] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea3609c-7176-4b28-b872-f1b3c6effe16 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.567870] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.608958] env[62914]: DEBUG oslo_concurrency.lockutils [None req-76a46274-d842-4a1a-982b-725620d60dd8 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.278s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.808182] env[62914]: INFO nova.compute.manager [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Took 13.29 seconds to build instance. 
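[editor's note -- illustrative sketch, not part of the captured log] The records above show the vmwareapi driver issuing VirtualMachine.ReconfigVM_Task, Rename_Task and PowerOnVM_Task against vCenter and then polling each task through oslo.vmware (wait_for_task at oslo_vmware/api.py:397, _poll_task at api.py:434/444, the "progress is N%" / "completed successfully" lines). The snippet below reproduces that issue-and-poll pattern with oslo.vmware's public session API under stated assumptions: the vCenter address, credentials and the "vm-12345" managed-object ID are placeholders, and the config spec only touches the annotation field rather than the VirtualDeviceConfigSpec used for the VMDK attach/detach seen here.

# Minimal issue-and-poll sketch with oslo.vmware; placeholder endpoint/credentials.
from oslo_vmware import api, vim_util

# host, username, password, API retry count, task poll interval (seconds)
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret', 10, 0.5)

# Placeholder managed-object reference for an existing VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Build a minimal VirtualMachineConfigSpec via the SOAP client factory; the
# attach/detach cases in the log would instead add a VirtualDeviceConfigSpec.
spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')
spec.annotation = 'reconfigured by example'

# Issue the task, then block while oslo.vmware polls it -- the same polling
# that produces the "_poll_task ... progress is N%" records above.
task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once vCenter reports the reconfigure done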
[ 1300.827471] env[62914]: DEBUG oslo_concurrency.lockutils [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.827674] env[62914]: DEBUG oslo_concurrency.lockutils [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.071519] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.071755] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.071923] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.072114] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1301.073059] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfc2ff9-10f1-4f52-9ca8-ef8d5be4ad8c {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.081986] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee404d0a-6669-4181-8ac9-b1e3299b6c0b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.099760] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38ba4d0-f4c2-4959-a69b-febb491e066a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.107722] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38e4d00-cbf0-4ff7-9a77-e4e00d97ba83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.138226] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179841MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1301.138429] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.138637] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.222497] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Start spawning the instance on the hypervisor. {{(pid=62914) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1301.253816] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-25T11:20:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-25T11:20:44Z,direct_url=,disk_format='vmdk',id=75c43660-b52b-450e-ba36-0f721e14bc6c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eed5cec4d5bf40c1ae2fdf52f374b697',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-25T11:20:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1301.254122] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1301.254296] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image limits 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.254488] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Flavor pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1301.254643] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Image pref 0:0:0 {{(pid=62914) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.254798] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62914) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1301.255017] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1301.255187] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1301.255380] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Got 1 possible topologies {{(pid=62914) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1301.255554] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1301.255733] env[62914]: DEBUG nova.virt.hardware [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62914) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.256608] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d8366c-d3e3-4116-845f-d5e3c5d6118f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.264700] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e925771c-6c7c-4dc3-918b-082cdae23d99 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.310942] env[62914]: DEBUG oslo_concurrency.lockutils [None req-0704351d-0f1f-4e7b-839f-cfdd330e7500 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.801s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.331136] env[62914]: INFO nova.compute.manager [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Detaching volume 4f197398-0ea7-4394-a22f-f6f4a1b8fe84 [ 1301.371582] env[62914]: INFO 
nova.virt.block_device [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Attempting to driver detach volume 4f197398-0ea7-4394-a22f-f6f4a1b8fe84 from mountpoint /dev/sdb [ 1301.371955] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1301.372207] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942108', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'name': 'volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f35f819b-276a-4dc4-9cda-9b9a02d16c02', 'attached_at': '', 'detached_at': '', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'serial': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1301.373184] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cecf61-ff8e-4bc1-a0fb-5e531fcc64c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.397192] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba304a1-6a25-4722-9a96-009bd987412a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.404758] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c421ac-f752-42ea-8b8e-828001bdc62e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.426149] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd03e8f3-b08c-4dcc-a14a-a51337f841cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.442654] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] The volume has not been displaced from its original location: [datastore2] volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84/volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1301.448055] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1301.448422] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcecde21-04a3-4593-a730-0c1add03c5ab {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.467865] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1301.467865] env[62914]: value = "task-4832944" [ 1301.467865] env[62914]: _type = "Task" [ 1301.467865] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.476797] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832944, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.602251] env[62914]: DEBUG nova.compute.manager [req-7c11a42c-09e9-4640-8a58-f96250bf2c9b req-15dafc8c-5eaf-43ac-81b2-66bbfee69185 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Received event network-vif-plugged-33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1301.602559] env[62914]: DEBUG oslo_concurrency.lockutils [req-7c11a42c-09e9-4640-8a58-f96250bf2c9b req-15dafc8c-5eaf-43ac-81b2-66bbfee69185 service nova] Acquiring lock "a4a9a045-48e6-4f6e-80b6-437436346052-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.602835] env[62914]: DEBUG oslo_concurrency.lockutils [req-7c11a42c-09e9-4640-8a58-f96250bf2c9b req-15dafc8c-5eaf-43ac-81b2-66bbfee69185 service nova] Lock "a4a9a045-48e6-4f6e-80b6-437436346052-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.603287] env[62914]: DEBUG oslo_concurrency.lockutils [req-7c11a42c-09e9-4640-8a58-f96250bf2c9b req-15dafc8c-5eaf-43ac-81b2-66bbfee69185 service nova] Lock "a4a9a045-48e6-4f6e-80b6-437436346052-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.603550] env[62914]: DEBUG nova.compute.manager [req-7c11a42c-09e9-4640-8a58-f96250bf2c9b req-15dafc8c-5eaf-43ac-81b2-66bbfee69185 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] No waiting events found dispatching network-vif-plugged-33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1301.603797] env[62914]: WARNING nova.compute.manager [req-7c11a42c-09e9-4640-8a58-f96250bf2c9b req-15dafc8c-5eaf-43ac-81b2-66bbfee69185 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Received unexpected event network-vif-plugged-33aba8b9-6e20-4d5c-9d40-5ce1885662fe for instance with vm_state building and task_state spawning. [ 1301.698819] env[62914]: DEBUG nova.network.neutron [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Successfully updated port: 33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1301.975166] env[62914]: DEBUG nova.compute.manager [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Received event network-changed-b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1301.975435] env[62914]: DEBUG nova.compute.manager [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Refreshing instance network info cache due to event network-changed-b5f7a871-c81e-497e-9960-b3b7d7981318. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1301.975664] env[62914]: DEBUG oslo_concurrency.lockutils [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] Acquiring lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.975811] env[62914]: DEBUG oslo_concurrency.lockutils [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] Acquired lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.975977] env[62914]: DEBUG nova.network.neutron [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Refreshing network info cache for port b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1301.984020] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832944, 'name': ReconfigVM_Task, 'duration_secs': 0.282832} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.984020] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1301.989428] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7e037ce-d295-4a19-938d-45ea5ac430af {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.005943] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1302.005943] env[62914]: value = "task-4832945" [ 1302.005943] env[62914]: _type = "Task" [ 1302.005943] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.015210] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832945, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.166600] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance f35f819b-276a-4dc4-9cda-9b9a02d16c02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1302.166756] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance abce5547-9f0e-4fd8-a44c-23aef12390d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1302.166883] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance a4a9a045-48e6-4f6e-80b6-437436346052 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1302.167081] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1302.167238] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=100GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '3', 'num_vm_active': '2', 'num_task_None': '2', 'num_os_type_None': '3', 'num_proj_b59bf6daf8c246f7b034dc0adcfc8cde': '1', 'io_workload': '1', 'num_proj_1780142384594b1dabc6811b54144d56': '1', 'num_vm_building': '1', 'num_task_spawning': '1', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1302.201614] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.201773] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.201931] env[62914]: DEBUG nova.network.neutron [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1302.221617] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e44166-3e26-4d56-81b3-e609677470aa {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.230090] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ee30cb-e891-4f2d-9388-63155516d65d {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.263626] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b67754-9f8f-4f22-85fd-7ab65ebf6b84 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.272382] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11765831-4cba-4ac2-a001-cda7e35024ec {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.287525] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 
{{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.518235] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832945, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.708446] env[62914]: DEBUG nova.network.neutron [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updated VIF entry in instance network info cache for port b5f7a871-c81e-497e-9960-b3b7d7981318. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1302.708841] env[62914]: DEBUG nova.network.neutron [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating instance_info_cache with network_info: [{"id": "b5f7a871-c81e-497e-9960-b3b7d7981318", "address": "fa:16:3e:89:25:e0", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5f7a871-c8", "ovs_interfaceid": "b5f7a871-c81e-497e-9960-b3b7d7981318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.735871] env[62914]: DEBUG nova.network.neutron [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Instance cache missing network info. 
{{(pid=62914) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1302.790506] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1302.880875] env[62914]: DEBUG nova.network.neutron [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [{"id": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "address": "fa:16:3e:fb:45:af", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33aba8b9-6e", "ovs_interfaceid": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.018294] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832945, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.212188] env[62914]: DEBUG oslo_concurrency.lockutils [req-bad3b474-12d9-45fe-98ef-9a3e24d55cc6 req-69983575-9913-4893-a59b-3235d8a75737 service nova] Releasing lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.296219] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1303.296690] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.158s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.382915] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.383303] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Instance network_info: |[{"id": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "address": "fa:16:3e:fb:45:af", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33aba8b9-6e", "ovs_interfaceid": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62914) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1303.383838] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:45:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c9a12d2-469f-4199-bfaa-f791d765deac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33aba8b9-6e20-4d5c-9d40-5ce1885662fe', 
'vif_model': 'vmxnet3'}] {{(pid=62914) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1303.391625] env[62914]: DEBUG oslo.service.loopingcall [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1303.391891] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Creating VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1303.392153] env[62914]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da529021-00ea-4870-a4c1-b7d736ef14ef {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.412723] env[62914]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1303.412723] env[62914]: value = "task-4832946" [ 1303.412723] env[62914]: _type = "Task" [ 1303.412723] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.420490] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832946, 'name': CreateVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.520298] env[62914]: DEBUG oslo_vmware.api [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832945, 'name': ReconfigVM_Task, 'duration_secs': 1.119801} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.520631] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942108', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'name': 'volume-4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f35f819b-276a-4dc4-9cda-9b9a02d16c02', 'attached_at': '', 'detached_at': '', 'volume_id': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84', 'serial': '4f197398-0ea7-4394-a22f-f6f4a1b8fe84'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1303.636414] env[62914]: DEBUG nova.compute.manager [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Received event network-changed-33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1303.636414] env[62914]: DEBUG nova.compute.manager [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Refreshing instance network info cache due to event network-changed-33aba8b9-6e20-4d5c-9d40-5ce1885662fe. 
{{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1303.636414] env[62914]: DEBUG oslo_concurrency.lockutils [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] Acquiring lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.636414] env[62914]: DEBUG oslo_concurrency.lockutils [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] Acquired lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.636414] env[62914]: DEBUG nova.network.neutron [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Refreshing network info cache for port 33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1303.922499] env[62914]: DEBUG oslo_vmware.api [-] Task: {'id': task-4832946, 'name': CreateVM_Task, 'duration_secs': 0.301173} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.922695] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Created VM on the ESX host {{(pid=62914) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1303.923413] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.923608] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.923947] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1303.924216] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7dffff2-eb44-4ee8-a519-bcd141a841fc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.928761] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1303.928761] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]525db903-3599-3ceb-086a-ed22df51bbfb" [ 1303.928761] env[62914]: _type = "Task" [ 1303.928761] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.936784] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525db903-3599-3ceb-086a-ed22df51bbfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.069804] env[62914]: DEBUG nova.objects.instance [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'flavor' on Instance uuid f35f819b-276a-4dc4-9cda-9b9a02d16c02 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1304.296478] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.297047] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.297047] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.297176] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.297239] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.350079] env[62914]: DEBUG nova.network.neutron [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updated VIF entry in instance network info cache for port 33aba8b9-6e20-4d5c-9d40-5ce1885662fe. 
{{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1304.350528] env[62914]: DEBUG nova.network.neutron [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [{"id": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "address": "fa:16:3e:fb:45:af", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33aba8b9-6e", "ovs_interfaceid": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.438980] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]525db903-3599-3ceb-086a-ed22df51bbfb, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.439293] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.439529] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Processing image 75c43660-b52b-450e-ba36-0f721e14bc6c {{(pid=62914) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1304.439770] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.439921] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.440122] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1304.440399] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6eac4ee-e4e5-4d1f-ac9b-9aeae86cb9c6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.449119] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62914) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1304.449323] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62914) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1304.450030] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36dcaec1-0681-4f41-839b-15173a39dcd8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.455233] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1304.455233] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]521ee7d2-201a-0523-fa8e-785757097c64" [ 1304.455233] env[62914]: _type = "Task" [ 1304.455233] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.462708] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521ee7d2-201a-0523-fa8e-785757097c64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.853922] env[62914]: DEBUG oslo_concurrency.lockutils [req-afd353bc-7d55-4185-89a0-7f330fa79316 req-ef3e059d-f834-47d2-ac05-770b4aa15393 service nova] Releasing lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.966923] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]521ee7d2-201a-0523-fa8e-785757097c64, 'name': SearchDatastore_Task, 'duration_secs': 0.009071} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.967522] env[62914]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f01403-3f5b-4e53-9092-df05e08ecb2b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.973547] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1304.973547] env[62914]: value = "session[52d52040-ced2-7a98-19c6-f97f142d02ee]523a2523-fb89-da4f-77a1-f66da69a46c1" [ 1304.973547] env[62914]: _type = "Task" [ 1304.973547] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.981856] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523a2523-fb89-da4f-77a1-f66da69a46c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.076990] env[62914]: DEBUG oslo_concurrency.lockutils [None req-df8bedf6-36b7-4380-8890-c6c8985a73e5 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.249s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.484083] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': session[52d52040-ced2-7a98-19c6-f97f142d02ee]523a2523-fb89-da4f-77a1-f66da69a46c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009813} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.484431] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.484625] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] a4a9a045-48e6-4f6e-80b6-437436346052/a4a9a045-48e6-4f6e-80b6-437436346052.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1305.484890] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32847de5-1444-4786-90ff-a34a3d3764a4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.492294] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1305.492294] env[62914]: value = "task-4832947" [ 1305.492294] env[62914]: _type = "Task" [ 1305.492294] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.499993] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832947, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.002714] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832947, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455555} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.002893] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75c43660-b52b-450e-ba36-0f721e14bc6c/75c43660-b52b-450e-ba36-0f721e14bc6c.vmdk to [datastore2] a4a9a045-48e6-4f6e-80b6-437436346052/a4a9a045-48e6-4f6e-80b6-437436346052.vmdk {{(pid=62914) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1306.003138] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Extending root virtual disk to 1048576 {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1306.003507] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8901ab6-ac79-48a6-a94b-5832951a7f67 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.010391] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1306.010391] env[62914]: value = "task-4832948" [ 1306.010391] env[62914]: _type = "Task" [ 1306.010391] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.017957] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832948, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.158153] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.158469] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.158696] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.158890] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.159081] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.161432] env[62914]: INFO nova.compute.manager [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Terminating instance [ 1306.163309] env[62914]: DEBUG nova.compute.manager [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1306.163504] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1306.164334] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef96464d-5bb2-475f-946b-3a47fee9e83f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.172216] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1306.172491] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23e20d83-1a6a-429b-b3cf-8250704a4e83 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.179072] env[62914]: DEBUG oslo_vmware.api [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1306.179072] env[62914]: value = "task-4832949" [ 1306.179072] env[62914]: _type = "Task" [ 1306.179072] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.187222] env[62914]: DEBUG oslo_vmware.api [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.521276] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832948, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062524} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.521641] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Extended root virtual disk {{(pid=62914) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1306.522371] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfce042b-2e9f-4889-899b-56e5f84f2430 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.544114] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] a4a9a045-48e6-4f6e-80b6-437436346052/a4a9a045-48e6-4f6e-80b6-437436346052.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1306.544414] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c6adc4f-f920-439f-b86d-218b25a353d2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.563979] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1306.563979] env[62914]: value = "task-4832950" [ 1306.563979] env[62914]: _type = "Task" [ 1306.563979] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.571975] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832950, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.689815] env[62914]: DEBUG oslo_vmware.api [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832949, 'name': PowerOffVM_Task, 'duration_secs': 0.171045} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.690177] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1306.690417] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1306.690718] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-402a4aa9-0dd4-47a2-ad30-41d4672a7694 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.757362] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1306.757581] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1306.757774] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleting the datastore file [datastore2] f35f819b-276a-4dc4-9cda-9b9a02d16c02 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1306.758077] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-327cdfad-d8aa-4893-a826-3a88284957e1 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.765625] env[62914]: DEBUG oslo_vmware.api [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for the task: (returnval){ [ 1306.765625] env[62914]: value = "task-4832952" [ 1306.765625] env[62914]: _type = "Task" [ 1306.765625] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.774613] env[62914]: DEBUG oslo_vmware.api [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832952, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.075039] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832950, 'name': ReconfigVM_Task, 'duration_secs': 0.307595} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.075348] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Reconfigured VM instance instance-0000007d to attach disk [datastore2] a4a9a045-48e6-4f6e-80b6-437436346052/a4a9a045-48e6-4f6e-80b6-437436346052.vmdk or device None with type sparse {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1307.076052] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f34051a-484f-4978-a8a4-076dcd0aee42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.083667] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1307.083667] env[62914]: value = "task-4832953" [ 1307.083667] env[62914]: _type = "Task" [ 1307.083667] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.096874] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832953, 'name': Rename_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.276225] env[62914]: DEBUG oslo_vmware.api [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Task: {'id': task-4832952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149766} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.276495] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1307.276699] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1307.276895] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1307.277101] env[62914]: INFO nova.compute.manager [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1307.277369] env[62914]: DEBUG oslo.service.loopingcall [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.277578] env[62914]: DEBUG nova.compute.manager [-] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1307.277691] env[62914]: DEBUG nova.network.neutron [-] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1307.595539] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832953, 'name': Rename_Task, 'duration_secs': 0.136119} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.595918] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1307.596070] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10e17aa8-53e3-42d2-8345-e9a27bea3172 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.602863] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1307.602863] env[62914]: value = "task-4832954" [ 1307.602863] env[62914]: _type = "Task" [ 1307.602863] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.613934] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832954, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.019897] env[62914]: DEBUG nova.compute.manager [req-8bc87375-995e-40b9-84af-1c51f8f166f2 req-17e5ebf7-4c50-46b1-a634-0b5b8c62df83 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Received event network-vif-deleted-1555b103-122e-466e-838b-ce49a0c203ae {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1308.020128] env[62914]: INFO nova.compute.manager [req-8bc87375-995e-40b9-84af-1c51f8f166f2 req-17e5ebf7-4c50-46b1-a634-0b5b8c62df83 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Neutron deleted interface 1555b103-122e-466e-838b-ce49a0c203ae; detaching it from the instance and deleting it from the info cache [ 1308.020392] env[62914]: DEBUG nova.network.neutron [req-8bc87375-995e-40b9-84af-1c51f8f166f2 req-17e5ebf7-4c50-46b1-a634-0b5b8c62df83 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.112519] env[62914]: DEBUG oslo_vmware.api [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832954, 'name': PowerOnVM_Task, 'duration_secs': 0.495052} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.112741] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1308.112946] env[62914]: INFO nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Took 6.89 seconds to spawn the instance on the hypervisor. [ 1308.113145] env[62914]: DEBUG nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1308.113992] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fbfc4d-4262-4895-992b-345527304101 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.493945] env[62914]: DEBUG nova.network.neutron [-] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.524195] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea962ab6-d334-4ea9-b0a5-d89ae7ade8c9 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.534854] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400a7715-ac72-4261-aa31-4a1ceca1d316 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.567773] env[62914]: DEBUG nova.compute.manager [req-8bc87375-995e-40b9-84af-1c51f8f166f2 req-17e5ebf7-4c50-46b1-a634-0b5b8c62df83 service nova] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Detach interface failed, port_id=1555b103-122e-466e-838b-ce49a0c203ae, reason: Instance f35f819b-276a-4dc4-9cda-9b9a02d16c02 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1308.633565] env[62914]: INFO nova.compute.manager [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Took 11.60 seconds to build instance. [ 1309.000455] env[62914]: INFO nova.compute.manager [-] [instance: f35f819b-276a-4dc4-9cda-9b9a02d16c02] Took 1.72 seconds to deallocate network for instance. 
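The repeated "Invoking <Something>_Task" / "Waiting for the task" / "progress is 0%" / "completed successfully" sequences above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOffVM_Task, PowerOnVM_Task) all come from oslo.vmware's invoke-then-poll pattern. Below is a minimal sketch of that pattern in isolation, not Nova's actual vm_util code; the session credentials and the folder, config-spec and resource-pool references are placeholders.

from oslo_vmware import api as vmware_api


def create_vm_and_wait(session, vm_folder_ref, config_spec, res_pool_ref):
    # "Invoking Folder.CreateVM_Task": the SOAP call returns a Task
    # managed-object reference immediately, without waiting for the VM.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                  vm_folder_ref,
                                  config=config_spec,
                                  pool=res_pool_ref)
    # wait_for_task polls the task every task_poll_interval seconds and logs
    # progress (the "progress is 0%" lines) until it reaches 'success',
    # raising if the task ends in an error state.
    task_info = session.wait_for_task(task_ref)
    return task_info.result  # managed-object reference of the new VM


# Example wiring (placeholder credentials; Nova builds the session from its
# [vmware] configuration options):
#   session = vmware_api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
#                                         api_retry_count=10, task_poll_interval=0.5)
#   vm_ref = create_vm_and_wait(session, vm_folder_ref, config_spec, res_pool_ref)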
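The "Acquiring lock ... acquired ... waited 0.001s ... released ... held N s" lines (for "compute_resources", the "refresh_cache-<uuid>" caches, and the per-instance terminate/attach locks) are emitted by oslo.concurrency's lockutils. A small sketch of the two forms involved, using example lock names and functions rather than Nova's real call sites:

import time

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Decorator form: other callers in this process block here; the wrapper
    # logs "acquired ... waited Ns" on entry and "released ... held Ns" on exit.
    time.sleep(0.1)


def refresh_network_cache(instance_uuid):
    # Context-manager form: produces the plain "Acquiring lock" / "Releasing
    # lock" debug lines around the critical section.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return {}  # rebuild and return the cached network info here


update_usage()
refresh_network_cache('a4a9a045-48e6-4f6e-80b6-437436346052')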
[ 1309.128442] env[62914]: DEBUG nova.compute.manager [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Received event network-changed-33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1309.128442] env[62914]: DEBUG nova.compute.manager [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Refreshing instance network info cache due to event network-changed-33aba8b9-6e20-4d5c-9d40-5ce1885662fe. {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11212}} [ 1309.128442] env[62914]: DEBUG oslo_concurrency.lockutils [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] Acquiring lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.128586] env[62914]: DEBUG oslo_concurrency.lockutils [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] Acquired lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.128691] env[62914]: DEBUG nova.network.neutron [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Refreshing network info cache for port 33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1309.134834] env[62914]: DEBUG oslo_concurrency.lockutils [None req-788ee526-28cf-4f5e-9cd9-cc68b3580a3f tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.110s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.509072] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.509327] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.509533] env[62914]: DEBUG nova.objects.instance [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lazy-loading 'resources' on Instance uuid f35f819b-276a-4dc4-9cda-9b9a02d16c02 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1309.841543] env[62914]: DEBUG nova.network.neutron [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa 
req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updated VIF entry in instance network info cache for port 33aba8b9-6e20-4d5c-9d40-5ce1885662fe. {{(pid=62914) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1309.841930] env[62914]: DEBUG nova.network.neutron [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [{"id": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "address": "fa:16:3e:fb:45:af", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33aba8b9-6e", "ovs_interfaceid": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.074591] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f73003f-8bd7-40d0-921a-15093f7a8eea {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.082532] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db5a3ef-85c6-422f-9ba5-da818705fa15 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.111966] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cdbf96-dea4-4000-9076-15a36e725607 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.120047] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a12b55-88c0-409d-956a-36d904000948 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.133973] env[62914]: DEBUG nova.compute.provider_tree [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.345448] env[62914]: DEBUG oslo_concurrency.lockutils [req-3e0286d3-ae5d-4389-ba10-66c71c50d5fa req-fabafba5-c0a5-4a27-844d-410cb240eccb service nova] Releasing lock 
"refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.637746] env[62914]: DEBUG nova.scheduler.client.report [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1311.143301] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.634s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.166419] env[62914]: INFO nova.scheduler.client.report [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Deleted allocations for instance f35f819b-276a-4dc4-9cda-9b9a02d16c02 [ 1311.674763] env[62914]: DEBUG oslo_concurrency.lockutils [None req-fc5ccc01-ebaf-4224-98e5-66200d66a5f1 tempest-AttachVolumeNegativeTest-515169220 tempest-AttachVolumeNegativeTest-515169220-project-member] Lock "f35f819b-276a-4dc4-9cda-9b9a02d16c02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.516s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.158226] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.158554] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.662084] env[62914]: DEBUG nova.compute.utils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1340.165133] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.229494] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.229911] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.230057] env[62914]: INFO nova.compute.manager [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Attaching volume 3ef68b81-d6a6-44df-be49-84d99e734ad0 to /dev/sdb [ 1341.260858] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74725395-f2c7-412a-b1e3-46120c712620 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.268426] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50a2c99-51f4-4656-a4f4-9e86c30b7347 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.281563] env[62914]: DEBUG nova.virt.block_device [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating existing volume attachment record: ff292027-beb1-4d92-a2f9-f1eed2c31cc2 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1345.825997] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1345.826272] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942111', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'name': 'volume-3ef68b81-d6a6-44df-be49-84d99e734ad0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'serial': '3ef68b81-d6a6-44df-be49-84d99e734ad0'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1345.827192] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd477fef-aa84-45a1-b33a-cc7b22f695cd {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.844876] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcd2fc4-9b5c-4cf0-a542-353153c351cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.870212] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-3ef68b81-d6a6-44df-be49-84d99e734ad0/volume-3ef68b81-d6a6-44df-be49-84d99e734ad0.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1345.870531] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e215defa-810b-4f52-a738-53133a510098 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.888704] env[62914]: DEBUG oslo_vmware.api [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1345.888704] env[62914]: value = "task-4832960" [ 1345.888704] env[62914]: _type = "Task" [ 1345.888704] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.897246] env[62914]: DEBUG oslo_vmware.api [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832960, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.400106] env[62914]: DEBUG oslo_vmware.api [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832960, 'name': ReconfigVM_Task, 'duration_secs': 0.322148} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.400458] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-3ef68b81-d6a6-44df-be49-84d99e734ad0/volume-3ef68b81-d6a6-44df-be49-84d99e734ad0.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1346.405219] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e36e90b9-bc9d-426c-96dd-208be15d128f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.421171] env[62914]: DEBUG oslo_vmware.api [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1346.421171] env[62914]: value = "task-4832961" [ 1346.421171] env[62914]: _type = "Task" [ 1346.421171] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.429312] env[62914]: DEBUG oslo_vmware.api [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832961, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.931751] env[62914]: DEBUG oslo_vmware.api [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832961, 'name': ReconfigVM_Task, 'duration_secs': 0.134673} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.932280] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942111', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'name': 'volume-3ef68b81-d6a6-44df-be49-84d99e734ad0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'serial': '3ef68b81-d6a6-44df-be49-84d99e734ad0'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1347.526763] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "a4a9a045-48e6-4f6e-80b6-437436346052" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.527039] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.527230] env[62914]: DEBUG nova.compute.manager [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1347.528182] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0541ed0-4af8-41a3-b2dc-aef5e8fc9c06 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.535197] env[62914]: DEBUG nova.compute.manager [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62914) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1347.535800] env[62914]: DEBUG nova.objects.instance [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'flavor' on Instance uuid a4a9a045-48e6-4f6e-80b6-437436346052 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.978108] env[62914]: DEBUG nova.objects.instance [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1348.042756] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1348.043075] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-979408e9-a917-4ddd-a9fd-ce97ab9b15d4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.050713] env[62914]: DEBUG oslo_vmware.api [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1348.050713] env[62914]: value = "task-4832962" [ 1348.050713] env[62914]: _type = "Task" [ 1348.050713] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.060747] env[62914]: DEBUG oslo_vmware.api [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.483430] env[62914]: DEBUG oslo_concurrency.lockutils [None req-3057f797-6554-46eb-8bf4-c641c6574571 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.254s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.560320] env[62914]: DEBUG oslo_vmware.api [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832962, 'name': PowerOffVM_Task, 'duration_secs': 0.190049} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.560691] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1348.560930] env[62914]: DEBUG nova.compute.manager [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1348.561765] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb00fe07-edf3-42c9-b5f8-9a84173fe3f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.074749] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bd5e4575-9756-4401-86fa-0dfab7c418f9 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.357112] env[62914]: DEBUG oslo_concurrency.lockutils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.357362] env[62914]: DEBUG oslo_concurrency.lockutils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.860965] env[62914]: DEBUG nova.compute.utils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Using /dev/sd instead of None {{(pid=62914) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1350.004028] env[62914]: DEBUG nova.objects.instance [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'flavor' on Instance uuid a4a9a045-48e6-4f6e-80b6-437436346052 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.364456] env[62914]: DEBUG oslo_concurrency.lockutils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.509399] 
env[62914]: DEBUG oslo_concurrency.lockutils [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.509609] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.509794] env[62914]: DEBUG nova.network.neutron [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1350.510046] env[62914]: DEBUG nova.objects.instance [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'info_cache' on Instance uuid a4a9a045-48e6-4f6e-80b6-437436346052 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1351.013650] env[62914]: DEBUG nova.objects.base [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62914) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1351.437931] env[62914]: DEBUG oslo_concurrency.lockutils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.438367] env[62914]: DEBUG oslo_concurrency.lockutils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.438546] env[62914]: INFO nova.compute.manager [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Attaching volume c4b46403-cdd1-467a-946c-436d24e09a65 to /dev/sdc [ 1351.469073] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2a65b5-e61b-4201-9ebc-2f1e36274b3f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.476888] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab512716-2111-43e8-84c6-2f2b4a4e2472 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.490383] env[62914]: 
DEBUG nova.virt.block_device [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating existing volume attachment record: fd6c35ad-b7e9-4576-8167-9a8252ef5e67 {{(pid=62914) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1351.768643] env[62914]: DEBUG nova.network.neutron [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [{"id": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "address": "fa:16:3e:fb:45:af", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33aba8b9-6e", "ovs_interfaceid": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.271272] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.775437] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powering on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1352.775871] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12c39783-1d7f-419f-979e-339f9e345ba6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.783900] env[62914]: DEBUG oslo_vmware.api [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1352.783900] env[62914]: value = "task-4832964" [ 1352.783900] env[62914]: _type = "Task" [ 1352.783900] env[62914]: } to complete. 
{{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.792132] env[62914]: DEBUG oslo_vmware.api [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832964, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.294296] env[62914]: DEBUG oslo_vmware.api [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832964, 'name': PowerOnVM_Task, 'duration_secs': 0.446835} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.294577] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powered on the VM {{(pid=62914) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1353.294763] env[62914]: DEBUG nova.compute.manager [None req-2dc48651-597f-4d7a-b0da-1593106b4403 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1353.295544] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3819ff5d-c49e-4499-9bf1-f38ffa2d6b51 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.809403] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a074e7-d139-4f81-af84-d81c26d04521 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.816566] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Suspending the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1354.816878] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6e58e554-d1f5-4a12-8035-6d0f0871b6be {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.823514] env[62914]: DEBUG oslo_vmware.api [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1354.823514] env[62914]: value = "task-4832966" [ 1354.823514] env[62914]: _type = "Task" [ 1354.823514] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.832682] env[62914]: DEBUG oslo_vmware.api [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832966, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.334110] env[62914]: DEBUG oslo_vmware.api [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832966, 'name': SuspendVM_Task} progress is 66%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.836770] env[62914]: DEBUG oslo_vmware.api [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832966, 'name': SuspendVM_Task, 'duration_secs': 0.737267} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.837188] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Suspended the VM {{(pid=62914) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1355.837314] env[62914]: DEBUG nova.compute.manager [None req-17f2f45b-51e2-4354-bb8d-e79357d85286 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1355.838078] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccbfd4f-e878-4004-934f-0f3d975dd2c7 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.035397] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Volume attach. 
Driver type: vmdk {{(pid=62914) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1356.035654] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942112', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'name': 'volume-c4b46403-cdd1-467a-946c-436d24e09a65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'serial': 'c4b46403-cdd1-467a-946c-436d24e09a65'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1356.036599] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9768810-bf82-433f-a212-4b2221fe84cc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.053856] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5aaebc-35e9-4d81-b737-15a776defe76 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.081301] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-c4b46403-cdd1-467a-946c-436d24e09a65/volume-c4b46403-cdd1-467a-946c-436d24e09a65.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1356.081619] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ae7a6f9-52f0-486f-930a-2803132923c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.100575] env[62914]: DEBUG oslo_vmware.api [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1356.100575] env[62914]: value = "task-4832967" [ 1356.100575] env[62914]: _type = "Task" [ 1356.100575] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.108973] env[62914]: DEBUG oslo_vmware.api [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832967, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.610760] env[62914]: DEBUG oslo_vmware.api [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832967, 'name': ReconfigVM_Task, 'duration_secs': 0.367563} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.611079] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-c4b46403-cdd1-467a-946c-436d24e09a65/volume-c4b46403-cdd1-467a-946c-436d24e09a65.vmdk or device None with type thin {{(pid=62914) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1356.615859] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d1c3212-6d8d-44d9-be5e-2195629db387 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.631857] env[62914]: DEBUG oslo_vmware.api [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1356.631857] env[62914]: value = "task-4832968" [ 1356.631857] env[62914]: _type = "Task" [ 1356.631857] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.640681] env[62914]: DEBUG oslo_vmware.api [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832968, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.141814] env[62914]: DEBUG oslo_vmware.api [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832968, 'name': ReconfigVM_Task, 'duration_secs': 0.13586} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.142190] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942112', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'name': 'volume-c4b46403-cdd1-467a-946c-436d24e09a65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'serial': 'c4b46403-cdd1-467a-946c-436d24e09a65'} {{(pid=62914) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1357.150524] env[62914]: INFO nova.compute.manager [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Resuming [ 1357.151118] env[62914]: DEBUG nova.objects.instance [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'flavor' on Instance uuid a4a9a045-48e6-4f6e-80b6-437436346052 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1357.567243] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1357.567470] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Starting heal instance info cache {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10004}} [ 1357.567598] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Rebuilding the list of instances to heal {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10008}} [ 1358.098711] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.098868] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquired lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.099024] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Forcefully refreshing network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1358.099181] env[62914]: DEBUG nova.objects.instance [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lazy-loading 'info_cache' on Instance uuid abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.159055] env[62914]: DEBUG oslo_concurrency.lockutils [None 
req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.159442] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquired lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.159442] env[62914]: DEBUG nova.network.neutron [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Building network info cache for instance {{(pid=62914) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1358.181048] env[62914]: DEBUG nova.objects.instance [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.686714] env[62914]: DEBUG oslo_concurrency.lockutils [None req-14c8de3a-de66-4734-9f8f-a1ae23d1830b tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.879781] env[62914]: DEBUG nova.network.neutron [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [{"id": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "address": "fa:16:3e:fb:45:af", "network": {"id": "27264368-337d-49b2-85fa-a36fc7410e66", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-872344487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ae1b7abf6f24eccb2b44d82687deb76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c9a12d2-469f-4199-bfaa-f791d765deac", "external-id": "nsx-vlan-transportzone-96", "segmentation_id": 96, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33aba8b9-6e", "ovs_interfaceid": "33aba8b9-6e20-4d5c-9d40-5ce1885662fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.975147] env[62914]: DEBUG 
oslo_concurrency.lockutils [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.975524] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.382449] env[62914]: DEBUG oslo_concurrency.lockutils [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Releasing lock "refresh_cache-a4a9a045-48e6-4f6e-80b6-437436346052" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.383455] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c740303e-a692-463b-8221-f3799e8dcb56 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.390651] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Resuming the VM {{(pid=62914) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1359.390903] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf33713f-bcec-48b3-b0cc-a1c056d8d2c0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.397401] env[62914]: DEBUG oslo_vmware.api [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1359.397401] env[62914]: value = "task-4832969" [ 1359.397401] env[62914]: _type = "Task" [ 1359.397401] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.405059] env[62914]: DEBUG oslo_vmware.api [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832969, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.479272] env[62914]: INFO nova.compute.manager [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Detaching volume 3ef68b81-d6a6-44df-be49-84d99e734ad0 [ 1359.510530] env[62914]: INFO nova.virt.block_device [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Attempting to driver detach volume 3ef68b81-d6a6-44df-be49-84d99e734ad0 from mountpoint /dev/sdb [ 1359.510849] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Volume detach. Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1359.511106] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942111', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'name': 'volume-3ef68b81-d6a6-44df-be49-84d99e734ad0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'serial': '3ef68b81-d6a6-44df-be49-84d99e734ad0'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1359.513431] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093584b7-f0de-4216-bcc4-4f9d54dd166a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.538811] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b28ed1b-6662-435c-8091-6be7de8fb0ee {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.546136] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0479b2-4d70-4590-b117-205f9842b0c4 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.570170] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2050068a-b2dd-485a-b704-9d26704380d3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.586684] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] The volume has not been displaced from its original location: [datastore2] volume-3ef68b81-d6a6-44df-be49-84d99e734ad0/volume-3ef68b81-d6a6-44df-be49-84d99e734ad0.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1359.591917] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1359.592248] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0020c509-d824-4789-a9ed-35b22065575f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.612949] env[62914]: DEBUG oslo_vmware.api [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1359.612949] env[62914]: value = "task-4832970" [ 1359.612949] env[62914]: _type = "Task" [ 1359.612949] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.621629] env[62914]: DEBUG oslo_vmware.api [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.860378] env[62914]: DEBUG nova.network.neutron [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating instance_info_cache with network_info: [{"id": "b5f7a871-c81e-497e-9960-b3b7d7981318", "address": "fa:16:3e:89:25:e0", "network": {"id": "be2378f7-ba73-4681-8d70-293afda2e4f6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1953272217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1780142384594b1dabc6811b54144d56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5f7a871-c8", "ovs_interfaceid": "b5f7a871-c81e-497e-9960-b3b7d7981318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.909788] env[62914]: DEBUG oslo_vmware.api [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832969, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.122861] env[62914]: DEBUG oslo_vmware.api [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832970, 'name': ReconfigVM_Task, 'duration_secs': 0.261296} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.123144] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1360.127802] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3b0b0bf-c2bb-450f-9cef-a1e8f28d10a6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.144389] env[62914]: DEBUG oslo_vmware.api [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1360.144389] env[62914]: value = "task-4832971" [ 1360.144389] env[62914]: _type = "Task" [ 1360.144389] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.153419] env[62914]: DEBUG oslo_vmware.api [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832971, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.362838] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Releasing lock "refresh_cache-abce5547-9f0e-4fd8-a44c-23aef12390d7" {{(pid=62914) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.363093] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updated the network info_cache for instance {{(pid=62914) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10075}} [ 1360.363319] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.363497] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.363629] env[62914]: DEBUG nova.compute.manager [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62914) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10623}} [ 1360.407327] env[62914]: DEBUG oslo_vmware.api [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832969, 'name': PowerOnVM_Task, 'duration_secs': 0.516408} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.407682] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Resumed the VM {{(pid=62914) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1360.407795] env[62914]: DEBUG nova.compute.manager [None req-cdc05c46-8d1f-49c2-ab42-732c4fb8af05 tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Checking state {{(pid=62914) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1360.408597] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad3be85-75f6-4b27-84db-d9cb11054bc8 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.567024] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.654685] env[62914]: DEBUG oslo_vmware.api [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832971, 'name': ReconfigVM_Task, 'duration_secs': 0.142337} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.655016] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942111', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'name': 'volume-3ef68b81-d6a6-44df-be49-84d99e734ad0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': '3ef68b81-d6a6-44df-be49-84d99e734ad0', 'serial': '3ef68b81-d6a6-44df-be49-84d99e734ad0'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1361.071641] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1361.195676] env[62914]: DEBUG nova.objects.instance [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1361.575073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.575073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.575073] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.575626] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62914) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1361.576214] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcfbf49-8608-4d2b-94d3-69a68863cf7f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.585735] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbe36fc-d167-4ca5-9d51-00f5723d7320 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.600611] env[62914]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c59229-7260-4876-86bd-6a21c9260d81 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.607917] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9434348-9952-4936-bdaf-78a2737bf4f6 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.637312] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180239MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62914) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1361.637482] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.637696] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.879974] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "a4a9a045-48e6-4f6e-80b6-437436346052" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.880286] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.880658] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "a4a9a045-48e6-4f6e-80b6-437436346052-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.880887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.881146] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 
tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.883543] env[62914]: INFO nova.compute.manager [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Terminating instance [ 1361.885609] env[62914]: DEBUG nova.compute.manager [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Start destroying the instance on the hypervisor. {{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1361.885831] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1361.886703] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16b3a67-cb77-4cfa-a22a-4e46553ae1dc {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.895256] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1361.895527] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71346f49-fb94-4ede-80db-1f5fea53d53f {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.902601] env[62914]: DEBUG oslo_vmware.api [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1361.902601] env[62914]: value = "task-4832972" [ 1361.902601] env[62914]: _type = "Task" [ 1361.902601] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.913173] env[62914]: DEBUG oslo_vmware.api [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832972, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.204057] env[62914]: DEBUG oslo_concurrency.lockutils [None req-027bfbe1-23d3-4d37-9907-ce07e37ca69d tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.229210] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.229503] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.412773] env[62914]: DEBUG oslo_vmware.api [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832972, 'name': PowerOffVM_Task, 'duration_secs': 0.188804} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.413075] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1362.413268] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1362.413525] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eb98cf6-85f1-41c7-b28c-5762b7206874 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.476352] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1362.476654] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1362.476982] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleting the datastore file [datastore2] a4a9a045-48e6-4f6e-80b6-437436346052 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1362.477422] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11d05139-8f6a-4575-9041-f5516e237187 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.485259] env[62914]: DEBUG oslo_vmware.api [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for the task: (returnval){ [ 1362.485259] env[62914]: value = "task-4832974" [ 1362.485259] env[62914]: _type = "Task" [ 1362.485259] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.493023] env[62914]: DEBUG oslo_vmware.api [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.664628] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance abce5547-9f0e-4fd8-a44c-23aef12390d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1362.665018] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Instance a4a9a045-48e6-4f6e-80b6-437436346052 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62914) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1362.665018] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1362.665180] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=100GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '2', 'num_vm_active': '2', 'num_task_None': '1', 'num_os_type_None': '2', 'num_proj_1780142384594b1dabc6811b54144d56': '1', 'io_workload': '0', 'num_task_deleting': '1', 'num_proj_5ae1b7abf6f24eccb2b44d82687deb76': '1'} {{(pid=62914) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1362.704162] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cf291f-2b31-44e3-bfd7-035ebf0c4e27 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.712308] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b419c13f-f4e0-4c20-8cf0-407bdf2e6918 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.741951] env[62914]: INFO nova.compute.manager [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Detaching volume c4b46403-cdd1-467a-946c-436d24e09a65 [ 1362.745054] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009b7534-e65e-44d8-9252-2e75aca5040b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.754530] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce36ec9-5eb5-4847-91b0-f68a34073ce0 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.769532] env[62914]: DEBUG nova.compute.provider_tree [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1362.772258] env[62914]: INFO nova.virt.block_device [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Attempting to driver detach volume c4b46403-cdd1-467a-946c-436d24e09a65 from mountpoint /dev/sdc [ 1362.772519] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Volume detach. 
Driver type: vmdk {{(pid=62914) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1362.772715] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942112', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'name': 'volume-c4b46403-cdd1-467a-946c-436d24e09a65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'serial': 'c4b46403-cdd1-467a-946c-436d24e09a65'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1362.773519] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef922a5-444d-4cfe-bf8b-ce762f52a854 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.795320] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57233df9-34d6-4a07-ba98-0fed4bf5fa4b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.803126] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd4c2ba-bfb0-4051-8370-9ab832503757 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.823551] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8711122-8b07-4b5c-a4ee-27986ab97023 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.839749] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] The volume has not been displaced from its original location: [datastore2] volume-c4b46403-cdd1-467a-946c-436d24e09a65/volume-c4b46403-cdd1-467a-946c-436d24e09a65.vmdk. No consolidation needed. 
{{(pid=62914) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1362.845402] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfiguring VM instance instance-0000007c to detach disk 2002 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1362.845854] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c94e4601-7130-4417-a31b-c69d04fde9a3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.864537] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1362.864537] env[62914]: value = "task-4832975" [ 1362.864537] env[62914]: _type = "Task" [ 1362.864537] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.873146] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832975, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.995451] env[62914]: DEBUG oslo_vmware.api [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Task: {'id': task-4832974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197504} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.995735] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1362.995880] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1362.996111] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1362.996305] env[62914]: INFO nova.compute.manager [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Took 1.11 seconds to destroy the instance on the hypervisor. 
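The PowerOffVM_Task, ReconfigVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: invoke a vSphere method through the driver's VMwareAPISession, then poll the returned task. A minimal sketch of that pattern follows; it is illustrative rather than Nova's vmops/volumeops code, and the vCenter endpoint, credentials, vm_ref, device and datastore path are placeholders, not values from this log.

from oslo_vmware import api as vmware_api


def get_session():
    # Placeholder endpoint and credentials; constructing the session connects
    # to vCenter and logs in (as seen at the start of this log).
    return vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)


def power_off(session, vm_ref):
    # "Invoking VirtualMachine.PowerOffVM_Task", then the "progress is X%" polling.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)


def detach_disk(session, vm_ref, device):
    # ReconfigVM_Task with a 'remove' deviceChange; no fileOperation is set,
    # so the backing VMDK (the Cinder volume) stays on the datastore.
    client_factory = session.vim.client.factory
    spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'remove'
    change.device = device
    spec.deviceChange = [change]
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)


def destroy_vm_files(session, vm_ref, ds_path, datacenter_ref):
    # UnregisterVM is a plain call (no task); the instance directory, e.g.
    # '[datastore2] <instance uuid>', is then removed through the FileManager.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    session.wait_for_task(task)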
[ 1362.996556] env[62914]: DEBUG oslo.service.loopingcall [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.996756] env[62914]: DEBUG nova.compute.manager [-] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1362.996932] env[62914]: DEBUG nova.network.neutron [-] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1363.272610] env[62914]: DEBUG nova.scheduler.client.report [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1363.374799] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832975, 'name': ReconfigVM_Task, 'duration_secs': 0.21843} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.375145] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Reconfigured VM instance instance-0000007c to detach disk 2002 {{(pid=62914) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1363.380173] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90fa2bab-dcbf-408a-91be-24a1a4bb0957 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.395519] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1363.395519] env[62914]: value = "task-4832976" [ 1363.395519] env[62914]: _type = "Task" [ 1363.395519] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.405627] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832976, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.496410] env[62914]: DEBUG nova.compute.manager [req-f1b4a0c7-5e5d-4b2f-92fb-78dab5619bd9 req-a9bfe8ae-43d9-404c-9061-19c6edb0f02c service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Received event network-vif-deleted-33aba8b9-6e20-4d5c-9d40-5ce1885662fe {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1363.496701] env[62914]: INFO nova.compute.manager [req-f1b4a0c7-5e5d-4b2f-92fb-78dab5619bd9 req-a9bfe8ae-43d9-404c-9061-19c6edb0f02c service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Neutron deleted interface 33aba8b9-6e20-4d5c-9d40-5ce1885662fe; detaching it from the instance and deleting it from the info cache [ 1363.496785] env[62914]: DEBUG nova.network.neutron [req-f1b4a0c7-5e5d-4b2f-92fb-78dab5619bd9 req-a9bfe8ae-43d9-404c-9061-19c6edb0f02c service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.777244] env[62914]: DEBUG nova.compute.resource_tracker [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62914) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1363.777635] env[62914]: DEBUG oslo_concurrency.lockutils [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.140s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.909224] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832976, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.968814] env[62914]: DEBUG nova.network.neutron [-] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.999713] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a94222a-a212-4ed4-b04f-f439c2481b40 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.009798] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d20a859-6021-4269-980d-e965ff4bfbb2 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.043838] env[62914]: DEBUG nova.compute.manager [req-f1b4a0c7-5e5d-4b2f-92fb-78dab5619bd9 req-a9bfe8ae-43d9-404c-9061-19c6edb0f02c service nova] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Detach interface failed, port_id=33aba8b9-6e20-4d5c-9d40-5ce1885662fe, reason: Instance a4a9a045-48e6-4f6e-80b6-437436346052 could not be found. 
{{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1364.272905] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.273181] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.273332] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.406398] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832976, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.471228] env[62914]: INFO nova.compute.manager [-] [instance: a4a9a045-48e6-4f6e-80b6-437436346052] Took 1.47 seconds to deallocate network for instance. [ 1364.563626] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.567282] env[62914]: DEBUG oslo_service.periodic_task [None req-f00ee44e-199b-4bf4-9369-e3d2c4c68e0a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62914) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.906756] env[62914]: DEBUG oslo_vmware.api [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832976, 'name': ReconfigVM_Task, 'duration_secs': 1.130516} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.907163] env[62914]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-942112', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'name': 'volume-c4b46403-cdd1-467a-946c-436d24e09a65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'abce5547-9f0e-4fd8-a44c-23aef12390d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'c4b46403-cdd1-467a-946c-436d24e09a65', 'serial': 'c4b46403-cdd1-467a-946c-436d24e09a65'} {{(pid=62914) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1364.977568] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.977877] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.978131] env[62914]: DEBUG nova.objects.instance [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lazy-loading 'resources' on Instance uuid a4a9a045-48e6-4f6e-80b6-437436346052 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1365.451334] env[62914]: DEBUG nova.objects.instance [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'flavor' on Instance uuid abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1365.522295] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef045bc-545b-421e-9f76-d09eea035461 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.531549] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965e77d5-1c6b-4cc9-95ad-1e79f578ad42 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.561515] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6bf766-d0af-4658-b9bf-040d61614b07 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.569067] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc43bdc2-e3c2-495e-aac9-8c54b7c646a1 {{(pid=62914) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.582049] env[62914]: DEBUG nova.compute.provider_tree [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.084733] env[62914]: DEBUG nova.scheduler.client.report [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1366.459447] env[62914]: DEBUG oslo_concurrency.lockutils [None req-bcc89ef0-dd68-42d6-a9c9-9e79a0dfaf4f tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.230s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.590140] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.613124] env[62914]: INFO nova.scheduler.client.report [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Deleted allocations for instance a4a9a045-48e6-4f6e-80b6-437436346052 [ 1367.120742] env[62914]: DEBUG oslo_concurrency.lockutils [None req-2e64335d-4b7d-4346-9209-9bd5ccb95cac tempest-ServerActionsTestJSON-1910611028 tempest-ServerActionsTestJSON-1910611028-project-member] Lock "a4a9a045-48e6-4f6e-80b6-437436346052" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.240s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.681205] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.681480] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.681695] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "abce5547-9f0e-4fd8-a44c-23aef12390d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.681887] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.682109] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.684246] env[62914]: INFO nova.compute.manager [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Terminating instance [ 1367.686058] env[62914]: DEBUG nova.compute.manager [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Start destroying the instance on the hypervisor. 
{{(pid=62914) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1367.686299] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Destroying instance {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1367.687120] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d391f4-1652-4c00-89c4-d66c2187c5fe {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.694678] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Powering off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1367.694933] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39324ca6-b799-41b1-ae83-f77510388ce3 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.701837] env[62914]: DEBUG oslo_vmware.api [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1367.701837] env[62914]: value = "task-4832977" [ 1367.701837] env[62914]: _type = "Task" [ 1367.701837] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.710525] env[62914]: DEBUG oslo_vmware.api [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.212040] env[62914]: DEBUG oslo_vmware.api [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832977, 'name': PowerOffVM_Task, 'duration_secs': 0.192329} completed successfully. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.212418] env[62914]: DEBUG nova.virt.vmwareapi.vm_util [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Powered off the VM {{(pid=62914) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1368.212462] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Unregistering the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1368.212704] env[62914]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c395881-ad7e-4544-9d23-31da90b83009 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.273317] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Unregistered the VM {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1368.273591] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Deleting contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1368.273791] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Deleting the datastore file [datastore2] abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1368.274174] env[62914]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbbe1a7d-e7ef-4291-84fa-682f60d27c6e {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.280747] env[62914]: DEBUG oslo_vmware.api [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for the task: (returnval){ [ 1368.280747] env[62914]: value = "task-4832979" [ 1368.280747] env[62914]: _type = "Task" [ 1368.280747] env[62914]: } to complete. {{(pid=62914) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.288750] env[62914]: DEBUG oslo_vmware.api [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832979, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.791377] env[62914]: DEBUG oslo_vmware.api [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Task: {'id': task-4832979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148436} completed successfully. {{(pid=62914) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.791666] env[62914]: DEBUG nova.virt.vmwareapi.ds_util [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Deleted the datastore file {{(pid=62914) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1368.791858] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Deleted contents of the VM from datastore datastore2 {{(pid=62914) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1368.792054] env[62914]: DEBUG nova.virt.vmwareapi.vmops [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Instance destroyed {{(pid=62914) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1368.792352] env[62914]: INFO nova.compute.manager [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1368.792612] env[62914]: DEBUG oslo.service.loopingcall [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62914) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.792815] env[62914]: DEBUG nova.compute.manager [-] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Deallocating network for instance {{(pid=62914) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1368.792910] env[62914]: DEBUG nova.network.neutron [-] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] deallocate_for_instance() {{(pid=62914) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1369.331291] env[62914]: DEBUG nova.compute.manager [req-0c4b0241-16d7-4240-b02d-058ee07dc4f2 req-ec74aff1-2582-4270-8313-260785013d5f service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Received event network-vif-deleted-b5f7a871-c81e-497e-9960-b3b7d7981318 {{(pid=62914) external_instance_event /opt/stack/nova/nova/compute/manager.py:11207}} [ 1369.331606] env[62914]: INFO nova.compute.manager [req-0c4b0241-16d7-4240-b02d-058ee07dc4f2 req-ec74aff1-2582-4270-8313-260785013d5f service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Neutron deleted interface b5f7a871-c81e-497e-9960-b3b7d7981318; detaching it from the instance and deleting it from the info cache [ 1369.331606] env[62914]: DEBUG nova.network.neutron [req-0c4b0241-16d7-4240-b02d-058ee07dc4f2 req-ec74aff1-2582-4270-8313-260785013d5f service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.801299] env[62914]: DEBUG nova.network.neutron [-] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Updating instance_info_cache with network_info: [] {{(pid=62914) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.833695] env[62914]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-265c74e8-5546-42c1-9291-c5e21a00c335 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.844545] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052a10ca-baaa-4fc9-927e-f7037e0b5e4b {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.872922] env[62914]: DEBUG nova.compute.manager [req-0c4b0241-16d7-4240-b02d-058ee07dc4f2 req-ec74aff1-2582-4270-8313-260785013d5f service nova] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Detach interface failed, port_id=b5f7a871-c81e-497e-9960-b3b7d7981318, reason: Instance abce5547-9f0e-4fd8-a44c-23aef12390d7 could not be found. {{(pid=62914) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11041}} [ 1370.303741] env[62914]: INFO nova.compute.manager [-] [instance: abce5547-9f0e-4fd8-a44c-23aef12390d7] Took 1.51 seconds to deallocate network for instance. 
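The "Waiting for function ... _deallocate_network_with_retries to return." lines come from an oslo.service looping call: the wrapped function is invoked repeatedly until it raises LoopingCallDone, whose value becomes the result of wait(). Nova appears to wrap the network deallocation in a retry helper with back-off; the snippet below is a simplified fixed-interval stand-in, assuming only the public oslo.service loopingcall API, to show just that mechanism.

from oslo_service import loopingcall

attempts = {'count': 0}


def _deallocate_with_retries():
    # Stand-in for the real deallocation call; pretend it succeeds on try 3.
    attempts['count'] += 1
    if attempts['count'] < 3:
        return                                  # run again after the interval
    raise loopingcall.LoopingCallDone(True)     # stop the loop; .wait() returns True


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()       # blocks until LoopingCallDone
print(result, attempts['count'])                # True 3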
[ 1370.810819] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.811187] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.811330] env[62914]: DEBUG nova.objects.instance [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lazy-loading 'resources' on Instance uuid abce5547-9f0e-4fd8-a44c-23aef12390d7 {{(pid=62914) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.348679] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1060fc-32af-4032-ba57-0e100ca96bff {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.356909] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0992d44c-36ce-4003-ad69-9cbb5787b6f5 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.386531] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067282e4-54c1-4b6d-bd0d-3e6e47d0f90a {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.394639] env[62914]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58a72b0-04f5-4f94-970f-b364385c7992 {{(pid=62914) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.407991] env[62914]: DEBUG nova.compute.provider_tree [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed in ProviderTree for provider: f2f7a014-852b-4b37-9610-c5761f4b0175 {{(pid=62914) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.911622] env[62914]: DEBUG nova.scheduler.client.report [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Inventory has not changed for provider f2f7a014-852b-4b37-9610-c5761f4b0175 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62914) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1372.416708] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 
tempest-AttachVolumeTestJSON-255914310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.605s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.436385] env[62914]: INFO nova.scheduler.client.report [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Deleted allocations for instance abce5547-9f0e-4fd8-a44c-23aef12390d7 [ 1372.946334] env[62914]: DEBUG oslo_concurrency.lockutils [None req-a722fd95-f6ff-4887-89cf-612e524d5eb5 tempest-AttachVolumeTestJSON-255914310 tempest-AttachVolumeTestJSON-255914310-project-member] Lock "abce5547-9f0e-4fd8-a44c-23aef12390d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.265s {{(pid=62914) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
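The inventory payload and the final resource view that recur through this section can be cross-checked with simple arithmetic. The capacity formula used below is the standard placement one, assumed here rather than taken from this log; the totals, reservations and per-instance allocations are the figures reported above.

# Inventory as reported for provider f2f7a014-852b-4b37-9610-c5761f4b0175.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
}
for rc, inv in inventory.items():
    # Assumed placement capacity formula: (total - reserved) * allocation_ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: capacity={capacity:.0f} (max per allocation {inv["max_unit"]})')
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200

# The "Final resource view" (used_ram=896MB, used_disk=2GB, used_vcpus=2) is the
# two instance allocations plus the 512 MB of reserved host memory:
allocations = [{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}] * 2
used_ram = inventory['MEMORY_MB']['reserved'] + sum(a['MEMORY_MB'] for a in allocations)
used_disk = sum(a['DISK_GB'] for a in allocations)
used_vcpus = sum(a['VCPU'] for a in allocations)
print(used_ram, used_disk, used_vcpus)          # 896 2 2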